blob: f06e0c3a393580dae4481ac8a0a261258564c057 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070061#include "EaselManagerClient.h"
Chien-Yu Chene687bd02016-12-07 18:30:26 -080062
Thierry Strudel3d639192016-09-09 11:52:26 -070063extern "C" {
64#include "mm_camera_dbg.h"
65}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080066#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070067
68using namespace android;
69
70namespace qcamera {
71
72#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
73
74#define EMPTY_PIPELINE_DELAY 2
75#define PARTIAL_RESULT_COUNT 2
76#define FRAME_SKIP_DELAY 0
77
78#define MAX_VALUE_8BIT ((1<<8)-1)
79#define MAX_VALUE_10BIT ((1<<10)-1)
80#define MAX_VALUE_12BIT ((1<<12)-1)
81
82#define VIDEO_4K_WIDTH 3840
83#define VIDEO_4K_HEIGHT 2160
84
Jason Leeb9e76432017-03-10 17:14:19 -080085#define MAX_EIS_WIDTH 3840
86#define MAX_EIS_HEIGHT 2160
Thierry Strudel3d639192016-09-09 11:52:26 -070087
88#define MAX_RAW_STREAMS 1
89#define MAX_STALLING_STREAMS 1
90#define MAX_PROCESSED_STREAMS 3
91/* Batch mode is enabled only if FPS set is equal to or greater than this */
92#define MIN_FPS_FOR_BATCH_MODE (120)
93#define PREVIEW_FPS_FOR_HFR (30)
94#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080095#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070096#define MAX_HFR_BATCH_SIZE (8)
97#define REGIONS_TUPLE_COUNT 5
98#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070099// Set a threshold for detection of missing buffers //seconds
100#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800101#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700102#define FLUSH_TIMEOUT 3
103#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
104
105#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
106 CAM_QCOM_FEATURE_CROP |\
107 CAM_QCOM_FEATURE_ROTATION |\
108 CAM_QCOM_FEATURE_SHARPNESS |\
109 CAM_QCOM_FEATURE_SCALE |\
110 CAM_QCOM_FEATURE_CAC |\
111 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700112/* Per configuration size for static metadata length*/
113#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700114
115#define TIMEOUT_NEVER -1
116
Jason Lee8ce36fa2017-04-19 19:40:37 -0700117/* Face rect indices */
118#define FACE_LEFT 0
119#define FACE_TOP 1
120#define FACE_RIGHT 2
121#define FACE_BOTTOM 3
122#define FACE_WEIGHT 4
123
Thierry Strudel04e026f2016-10-10 11:27:36 -0700124/* Face landmarks indices */
125#define LEFT_EYE_X 0
126#define LEFT_EYE_Y 1
127#define RIGHT_EYE_X 2
128#define RIGHT_EYE_Y 3
129#define MOUTH_X 4
130#define MOUTH_Y 5
131#define TOTAL_LANDMARK_INDICES 6
132
Zhijun He2a5df222017-04-04 18:20:38 -0700133// Max preferred zoom
Zhijun He76870072017-05-08 17:13:17 -0700134#define MAX_PREFERRED_ZOOM_RATIO 7.0
Zhijun He2a5df222017-04-04 18:20:38 -0700135
Chien-Yu Chen3b630e52017-06-02 15:39:47 -0700136// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
137#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0
138
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700139// Whether to check for the GPU stride padding, or use the default
140//#define CHECK_GPU_PIXEL_ALIGNMENT
141
Thierry Strudel3d639192016-09-09 11:52:26 -0700142cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
143const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
144extern pthread_mutex_t gCamLock;
145volatile uint32_t gCamHal3LogLevel = 1;
146extern uint8_t gNumCameraSessions;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700147
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800148// Note that this doesn't support concurrent front and back camera b/35960155.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700149// The following Easel related variables must be protected by gHdrPlusClientLock.
150EaselManagerClient gEaselManagerClient;
151bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
152std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
153bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700154bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700155bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700156
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800157// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
158bool gEaselBypassOnly;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700159
160Mutex gHdrPlusClientLock; // Protect above Easel related variables.
161
Thierry Strudel3d639192016-09-09 11:52:26 -0700162
163const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
164 {"On", CAM_CDS_MODE_ON},
165 {"Off", CAM_CDS_MODE_OFF},
166 {"Auto",CAM_CDS_MODE_AUTO}
167};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700168const QCamera3HardwareInterface::QCameraMap<
169 camera_metadata_enum_android_video_hdr_mode_t,
170 cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
171 { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
172 { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
173};
174
Thierry Strudel54dc9782017-02-15 12:12:10 -0800175const QCamera3HardwareInterface::QCameraMap<
176 camera_metadata_enum_android_binning_correction_mode_t,
177 cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
178 { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
179 { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
180};
Thierry Strudel04e026f2016-10-10 11:27:36 -0700181
182const QCamera3HardwareInterface::QCameraMap<
183 camera_metadata_enum_android_ir_mode_t,
184 cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
185 {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
186 {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
187 {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
188};
Thierry Strudel3d639192016-09-09 11:52:26 -0700189
190const QCamera3HardwareInterface::QCameraMap<
191 camera_metadata_enum_android_control_effect_mode_t,
192 cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
193 { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
194 { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
195 { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
196 { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
197 { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
198 { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
199 { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
200 { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
201 { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
202};
203
204const QCamera3HardwareInterface::QCameraMap<
205 camera_metadata_enum_android_control_awb_mode_t,
206 cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
207 { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
208 { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
209 { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
210 { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
211 { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
212 { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
213 { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
214 { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
215 { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
216};
217
218const QCamera3HardwareInterface::QCameraMap<
219 camera_metadata_enum_android_control_scene_mode_t,
220 cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
221 { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
222 { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
223 { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
224 { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
225 { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
226 { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
227 { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
228 { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
229 { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
230 { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
231 { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
232 { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
233 { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
234 { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
235 { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800236 { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
237 { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
Thierry Strudel3d639192016-09-09 11:52:26 -0700238};
239
240const QCamera3HardwareInterface::QCameraMap<
241 camera_metadata_enum_android_control_af_mode_t,
242 cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
243 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
244 { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
245 { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
246 { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
247 { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
248 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
249 { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
250};
251
252const QCamera3HardwareInterface::QCameraMap<
253 camera_metadata_enum_android_color_correction_aberration_mode_t,
254 cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
255 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
256 CAM_COLOR_CORRECTION_ABERRATION_OFF },
257 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
258 CAM_COLOR_CORRECTION_ABERRATION_FAST },
259 { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
260 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
261};
262
263const QCamera3HardwareInterface::QCameraMap<
264 camera_metadata_enum_android_control_ae_antibanding_mode_t,
265 cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
266 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
267 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
268 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
269 { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
270};
271
272const QCamera3HardwareInterface::QCameraMap<
273 camera_metadata_enum_android_control_ae_mode_t,
274 cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
275 { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
276 { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
277 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
278 { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
279 { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
280};
281
282const QCamera3HardwareInterface::QCameraMap<
283 camera_metadata_enum_android_flash_mode_t,
284 cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
285 { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
286 { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
287 { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
288};
289
290const QCamera3HardwareInterface::QCameraMap<
291 camera_metadata_enum_android_statistics_face_detect_mode_t,
292 cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
293 { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
294 { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
295 { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
296};
297
298const QCamera3HardwareInterface::QCameraMap<
299 camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
300 cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
301 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
302 CAM_FOCUS_UNCALIBRATED },
303 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
304 CAM_FOCUS_APPROXIMATE },
305 { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
306 CAM_FOCUS_CALIBRATED }
307};
308
309const QCamera3HardwareInterface::QCameraMap<
310 camera_metadata_enum_android_lens_state_t,
311 cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
312 { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
313 { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
314};
315
316const int32_t available_thumbnail_sizes[] = {0, 0,
317 176, 144,
318 240, 144,
319 256, 144,
320 240, 160,
321 256, 154,
322 240, 240,
323 320, 240};
324
325const QCamera3HardwareInterface::QCameraMap<
326 camera_metadata_enum_android_sensor_test_pattern_mode_t,
327 cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
328 { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
329 { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
330 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
331 { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
332 { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
333 { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
334};
335
336/* Since there is no mapping for all the options some Android enum are not listed.
337 * Also, the order in this list is important because while mapping from HAL to Android it will
338 * traverse from lower to higher index which means that for HAL values that are map to different
339 * Android values, the traverse logic will select the first one found.
340 */
341const QCamera3HardwareInterface::QCameraMap<
342 camera_metadata_enum_android_sensor_reference_illuminant1_t,
343 cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
344 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
345 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
346 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
347 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
348 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
349 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
350 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
351 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
352 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
353 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
354 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
355 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
356 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
357 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
358 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
359 { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
360};
361
362const QCamera3HardwareInterface::QCameraMap<
363 int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
364 { 60, CAM_HFR_MODE_60FPS},
365 { 90, CAM_HFR_MODE_90FPS},
366 { 120, CAM_HFR_MODE_120FPS},
367 { 150, CAM_HFR_MODE_150FPS},
368 { 180, CAM_HFR_MODE_180FPS},
369 { 210, CAM_HFR_MODE_210FPS},
370 { 240, CAM_HFR_MODE_240FPS},
371 { 480, CAM_HFR_MODE_480FPS},
372};
373
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700374const QCamera3HardwareInterface::QCameraMap<
375 qcamera3_ext_instant_aec_mode_t,
376 cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
377 { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
378 { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
379 { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
380};
Thierry Strudel54dc9782017-02-15 12:12:10 -0800381
382const QCamera3HardwareInterface::QCameraMap<
383 qcamera3_ext_exposure_meter_mode_t,
384 cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
385 { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
386 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
387 { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
388 { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
389 { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
390 { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
391 { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
392};
393
394const QCamera3HardwareInterface::QCameraMap<
395 qcamera3_ext_iso_mode_t,
396 cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
397 { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
398 { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
399 { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
400 { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
401 { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
402 { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
403 { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
404 { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
405};
406
Thierry Strudel3d639192016-09-09 11:52:26 -0700407camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
408 .initialize = QCamera3HardwareInterface::initialize,
409 .configure_streams = QCamera3HardwareInterface::configure_streams,
410 .register_stream_buffers = NULL,
411 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
412 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
413 .get_metadata_vendor_tag_ops = NULL,
414 .dump = QCamera3HardwareInterface::dump,
415 .flush = QCamera3HardwareInterface::flush,
416 .reserved = {0},
417};
418
419// initialise to some default value
420uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
421
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700422static inline void logEaselEvent(const char *tag, const char *event) {
423 if (CC_UNLIKELY(gEaselProfilingEnabled)) {
424 struct timespec ts = {};
425 static int64_t kMsPerSec = 1000;
426 static int64_t kNsPerMs = 1000000;
427 status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
428 if (res != OK) {
429 ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
430 } else {
431 int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
432 ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
433 }
434 }
435}
436
Thierry Strudel3d639192016-09-09 11:52:26 -0700437/*===========================================================================
438 * FUNCTION : QCamera3HardwareInterface
439 *
440 * DESCRIPTION: constructor of QCamera3HardwareInterface
441 *
442 * PARAMETERS :
443 * @cameraId : camera ID
444 *
445 * RETURN : none
446 *==========================================================================*/
447QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
448 const camera_module_callbacks_t *callbacks)
449 : mCameraId(cameraId),
450 mCameraHandle(NULL),
451 mCameraInitialized(false),
452 mCallbackOps(NULL),
453 mMetadataChannel(NULL),
454 mPictureChannel(NULL),
455 mRawChannel(NULL),
456 mSupportChannel(NULL),
457 mAnalysisChannel(NULL),
458 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700459 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700460 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800461 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800462 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700463 mChannelHandle(0),
464 mFirstConfiguration(true),
465 mFlush(false),
466 mFlushPerf(false),
467 mParamHeap(NULL),
468 mParameters(NULL),
469 mPrevParameters(NULL),
470 m_bIsVideo(false),
471 m_bIs4KVideo(false),
472 m_bEisSupportedSize(false),
473 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800474 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700475 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700476 mShutterDispatcher(this),
477 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700478 mMinProcessedFrameDuration(0),
479 mMinJpegFrameDuration(0),
480 mMinRawFrameDuration(0),
481 mMetaFrameCount(0U),
482 mUpdateDebugLevel(false),
483 mCallbacks(callbacks),
484 mCaptureIntent(0),
485 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700486 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800487 /* DevCamDebug metadata internal m control*/
488 mDevCamDebugMetaEnable(0),
489 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700490 mBatchSize(0),
491 mToBeQueuedVidBufs(0),
492 mHFRVideoFps(DEFAULT_VIDEO_FPS),
493 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800494 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800495 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700496 mFirstFrameNumberInBatch(0),
497 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800498 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700499 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
500 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000501 mPDSupported(false),
502 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700503 mInstantAEC(false),
504 mResetInstantAEC(false),
505 mInstantAECSettledFrameNumber(0),
506 mAecSkipDisplayFrameBound(0),
507 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800508 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700509 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700510 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700511 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700512 mState(CLOSED),
513 mIsDeviceLinked(false),
514 mIsMainCamera(true),
515 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700516 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800517 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800518 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700519 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800520 mIsApInputUsedForHdrPlus(false),
521 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800522 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700523{
524 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700525 mCommon.init(gCamCapability[cameraId]);
526 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700527#ifndef USE_HAL_3_3
528 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
529#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700530 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700531#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700532 mCameraDevice.common.close = close_camera_device;
533 mCameraDevice.ops = &mCameraOps;
534 mCameraDevice.priv = this;
535 gCamCapability[cameraId]->version = CAM_HAL_V3;
536 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
537 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
538 gCamCapability[cameraId]->min_num_pp_bufs = 3;
539
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800540 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700541
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800542 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700543 mPendingLiveRequest = 0;
544 mCurrentRequestId = -1;
545 pthread_mutex_init(&mMutex, NULL);
546
547 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
548 mDefaultMetadata[i] = NULL;
549
550 // Getting system props of different kinds
551 char prop[PROPERTY_VALUE_MAX];
552 memset(prop, 0, sizeof(prop));
553 property_get("persist.camera.raw.dump", prop, "0");
554 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800555 property_get("persist.camera.hal3.force.hdr", prop, "0");
556 mForceHdrSnapshot = atoi(prop);
557
Thierry Strudel3d639192016-09-09 11:52:26 -0700558 if (mEnableRawDump)
559 LOGD("Raw dump from Camera HAL enabled");
560
561 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
562 memset(mLdafCalib, 0, sizeof(mLdafCalib));
563
564 memset(prop, 0, sizeof(prop));
565 property_get("persist.camera.tnr.preview", prop, "0");
566 m_bTnrPreview = (uint8_t)atoi(prop);
567
568 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800569 property_get("persist.camera.swtnr.preview", prop, "1");
570 m_bSwTnrPreview = (uint8_t)atoi(prop);
571
572 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700573 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700574 m_bTnrVideo = (uint8_t)atoi(prop);
575
576 memset(prop, 0, sizeof(prop));
577 property_get("persist.camera.avtimer.debug", prop, "0");
578 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800579 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700580
Thierry Strudel54dc9782017-02-15 12:12:10 -0800581 memset(prop, 0, sizeof(prop));
582 property_get("persist.camera.cacmode.disable", prop, "0");
583 m_cacModeDisabled = (uint8_t)atoi(prop);
584
Thierry Strudel3d639192016-09-09 11:52:26 -0700585 //Load and read GPU library.
586 lib_surface_utils = NULL;
587 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700588 mSurfaceStridePadding = CAM_PAD_TO_64;
589#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700590 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
591 if (lib_surface_utils) {
592 *(void **)&LINK_get_surface_pixel_alignment =
593 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
594 if (LINK_get_surface_pixel_alignment) {
595 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
596 }
597 dlclose(lib_surface_utils);
598 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700599#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000600 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
601 mPDSupported = (0 <= mPDIndex) ? true : false;
602
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700603 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700604}
605
606/*===========================================================================
607 * FUNCTION : ~QCamera3HardwareInterface
608 *
609 * DESCRIPTION: destructor of QCamera3HardwareInterface
610 *
611 * PARAMETERS : none
612 *
613 * RETURN : none
614 *==========================================================================*/
615QCamera3HardwareInterface::~QCamera3HardwareInterface()
616{
617 LOGD("E");
618
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800619 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700620
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800621 // Disable power hint and enable the perf lock for close camera
622 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
623 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
624
625 // unlink of dualcam during close camera
626 if (mIsDeviceLinked) {
627 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
628 &m_pDualCamCmdPtr->bundle_info;
629 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
630 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
631 pthread_mutex_lock(&gCamLock);
632
633 if (mIsMainCamera == 1) {
634 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
635 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
636 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
637 // related session id should be session id of linked session
638 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
639 } else {
640 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
641 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
642 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
643 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
644 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800645 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800646 pthread_mutex_unlock(&gCamLock);
647
648 rc = mCameraHandle->ops->set_dual_cam_cmd(
649 mCameraHandle->camera_handle);
650 if (rc < 0) {
651 LOGE("Dualcam: Unlink failed, but still proceed to close");
652 }
653 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700654
655 /* We need to stop all streams before deleting any stream */
656 if (mRawDumpChannel) {
657 mRawDumpChannel->stop();
658 }
659
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700660 if (mHdrPlusRawSrcChannel) {
661 mHdrPlusRawSrcChannel->stop();
662 }
663
Thierry Strudel3d639192016-09-09 11:52:26 -0700664 // NOTE: 'camera3_stream_t *' objects are already freed at
665 // this stage by the framework
666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
667 it != mStreamInfo.end(); it++) {
668 QCamera3ProcessingChannel *channel = (*it)->channel;
669 if (channel) {
670 channel->stop();
671 }
672 }
673 if (mSupportChannel)
674 mSupportChannel->stop();
675
676 if (mAnalysisChannel) {
677 mAnalysisChannel->stop();
678 }
679 if (mMetadataChannel) {
680 mMetadataChannel->stop();
681 }
682 if (mChannelHandle) {
683 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
684 mChannelHandle);
685 LOGD("stopping channel %d", mChannelHandle);
686 }
687
688 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
689 it != mStreamInfo.end(); it++) {
690 QCamera3ProcessingChannel *channel = (*it)->channel;
691 if (channel)
692 delete channel;
693 free (*it);
694 }
695 if (mSupportChannel) {
696 delete mSupportChannel;
697 mSupportChannel = NULL;
698 }
699
700 if (mAnalysisChannel) {
701 delete mAnalysisChannel;
702 mAnalysisChannel = NULL;
703 }
704 if (mRawDumpChannel) {
705 delete mRawDumpChannel;
706 mRawDumpChannel = NULL;
707 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700708 if (mHdrPlusRawSrcChannel) {
709 delete mHdrPlusRawSrcChannel;
710 mHdrPlusRawSrcChannel = NULL;
711 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700712 if (mDummyBatchChannel) {
713 delete mDummyBatchChannel;
714 mDummyBatchChannel = NULL;
715 }
716
717 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800718 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700719
720 if (mMetadataChannel) {
721 delete mMetadataChannel;
722 mMetadataChannel = NULL;
723 }
724
725 /* Clean up all channels */
726 if (mCameraInitialized) {
727 if(!mFirstConfiguration){
728 //send the last unconfigure
729 cam_stream_size_info_t stream_config_info;
730 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
731 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
732 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800733 m_bIs4KVideo ? 0 :
734 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700735 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700736 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
737 stream_config_info);
738 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
739 if (rc < 0) {
740 LOGE("set_parms failed for unconfigure");
741 }
742 }
743 deinitParameters();
744 }
745
746 if (mChannelHandle) {
747 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
748 mChannelHandle);
749 LOGH("deleting channel %d", mChannelHandle);
750 mChannelHandle = 0;
751 }
752
753 if (mState != CLOSED)
754 closeCamera();
755
756 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
757 req.mPendingBufferList.clear();
758 }
759 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700760 for (pendingRequestIterator i = mPendingRequestsList.begin();
761 i != mPendingRequestsList.end();) {
762 i = erasePendingRequest(i);
763 }
764 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
765 if (mDefaultMetadata[i])
766 free_camera_metadata(mDefaultMetadata[i]);
767
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700769
770 pthread_cond_destroy(&mRequestCond);
771
772 pthread_cond_destroy(&mBuffersCond);
773
774 pthread_mutex_destroy(&mMutex);
775 LOGD("X");
776}
777
778/*===========================================================================
779 * FUNCTION : erasePendingRequest
780 *
781 * DESCRIPTION: function to erase a desired pending request after freeing any
782 * allocated memory
783 *
784 * PARAMETERS :
785 * @i : iterator pointing to pending request to be erased
786 *
787 * RETURN : iterator pointing to the next request
788 *==========================================================================*/
789QCamera3HardwareInterface::pendingRequestIterator
790 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
791{
792 if (i->input_buffer != NULL) {
793 free(i->input_buffer);
794 i->input_buffer = NULL;
795 }
796 if (i->settings != NULL)
797 free_camera_metadata((camera_metadata_t*)i->settings);
798 return mPendingRequestsList.erase(i);
799}
800
801/*===========================================================================
802 * FUNCTION : camEvtHandle
803 *
804 * DESCRIPTION: Function registered to mm-camera-interface to handle events
805 *
806 * PARAMETERS :
807 * @camera_handle : interface layer camera handle
808 * @evt : ptr to event
809 * @user_data : user data ptr
810 *
811 * RETURN : none
812 *==========================================================================*/
813void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
814 mm_camera_event_t *evt,
815 void *user_data)
816{
817 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
818 if (obj && evt) {
819 switch(evt->server_event_type) {
820 case CAM_EVENT_TYPE_DAEMON_DIED:
821 pthread_mutex_lock(&obj->mMutex);
822 obj->mState = ERROR;
823 pthread_mutex_unlock(&obj->mMutex);
824 LOGE("Fatal, camera daemon died");
825 break;
826
827 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
828 LOGD("HAL got request pull from Daemon");
829 pthread_mutex_lock(&obj->mMutex);
830 obj->mWokenUpByDaemon = true;
831 obj->unblockRequestIfNecessary();
832 pthread_mutex_unlock(&obj->mMutex);
833 break;
834
835 default:
836 LOGW("Warning: Unhandled event %d",
837 evt->server_event_type);
838 break;
839 }
840 } else {
841 LOGE("NULL user_data/evt");
842 }
843}
844
845/*===========================================================================
846 * FUNCTION : openCamera
847 *
848 * DESCRIPTION: open camera
849 *
850 * PARAMETERS :
851 * @hw_device : double ptr for camera device struct
852 *
853 * RETURN : int32_t type of status
854 * NO_ERROR -- success
855 * none-zero failure code
856 *==========================================================================*/
857int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
858{
859 int rc = 0;
860 if (mState != CLOSED) {
861 *hw_device = NULL;
862 return PERMISSION_DENIED;
863 }
864
Chien-Yu Chene96475e2017-04-11 11:53:26 -0700865 logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800866 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700867 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
868 mCameraId);
869
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700870 if (mCameraHandle) {
871 LOGE("Failure: Camera already opened");
872 return ALREADY_EXISTS;
873 }
874
875 {
876 Mutex::Autolock l(gHdrPlusClientLock);
877 if (gEaselManagerClient.isEaselPresentOnDevice()) {
878 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
879 rc = gEaselManagerClient.resume();
880 if (rc != 0) {
881 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
882 return rc;
883 }
884 }
885 }
886
Thierry Strudel3d639192016-09-09 11:52:26 -0700887 rc = openCamera();
888 if (rc == 0) {
889 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800890 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700891 *hw_device = NULL;
Chien-Yu Chend8e57982017-05-25 12:10:21 -0700892
893 // Suspend Easel because opening camera failed.
894 {
895 Mutex::Autolock l(gHdrPlusClientLock);
896 if (gEaselManagerClient.isEaselPresentOnDevice()) {
897 status_t suspendErr = gEaselManagerClient.suspend();
898 if (suspendErr != 0) {
899 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
900 strerror(-suspendErr), suspendErr);
901 }
902 }
903 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800904 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700905
Thierry Strudel3d639192016-09-09 11:52:26 -0700906 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
907 mCameraId, rc);
908
909 if (rc == NO_ERROR) {
910 mState = OPENED;
911 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800912
Thierry Strudel3d639192016-09-09 11:52:26 -0700913 return rc;
914}
915
916/*===========================================================================
917 * FUNCTION : openCamera
918 *
919 * DESCRIPTION: open camera
920 *
921 * PARAMETERS : none
922 *
923 * RETURN : int32_t type of status
924 * NO_ERROR -- success
925 * none-zero failure code
926 *==========================================================================*/
927int QCamera3HardwareInterface::openCamera()
928{
929 int rc = 0;
930 char value[PROPERTY_VALUE_MAX];
931
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800932 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800933
Thierry Strudel3d639192016-09-09 11:52:26 -0700934 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
935 if (rc < 0) {
936 LOGE("Failed to reserve flash for camera id: %d",
937 mCameraId);
938 return UNKNOWN_ERROR;
939 }
940
941 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
942 if (rc) {
943 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
944 return rc;
945 }
946
947 if (!mCameraHandle) {
948 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
949 return -ENODEV;
950 }
951
952 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
953 camEvtHandle, (void *)this);
954
955 if (rc < 0) {
956 LOGE("Error, failed to register event callback");
957 /* Not closing camera here since it is already handled in destructor */
958 return FAILED_TRANSACTION;
959 }
960
961 mExifParams.debug_params =
962 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
963 if (mExifParams.debug_params) {
964 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
965 } else {
966 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
967 return NO_MEMORY;
968 }
969 mFirstConfiguration = true;
970
971 //Notify display HAL that a camera session is active.
972 //But avoid calling the same during bootup because camera service might open/close
973 //cameras at boot time during its initialization and display service will also internally
974 //wait for camera service to initialize first while calling this display API, resulting in a
975 //deadlock situation. Since boot time camera open/close calls are made only to fetch
976 //capabilities, no need of this display bw optimization.
977 //Use "service.bootanim.exit" property to know boot status.
978 property_get("service.bootanim.exit", value, "0");
979 if (atoi(value) == 1) {
980 pthread_mutex_lock(&gCamLock);
981 if (gNumCameraSessions++ == 0) {
982 setCameraLaunchStatus(true);
983 }
984 pthread_mutex_unlock(&gCamLock);
985 }
986
987 //fill the session id needed while linking dual cam
988 pthread_mutex_lock(&gCamLock);
989 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
990 &sessionId[mCameraId]);
991 pthread_mutex_unlock(&gCamLock);
992
993 if (rc < 0) {
994 LOGE("Error, failed to get sessiion id");
995 return UNKNOWN_ERROR;
996 } else {
997 //Allocate related cam sync buffer
998 //this is needed for the payload that goes along with bundling cmd for related
999 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001000 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
1001 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07001002 if(rc != OK) {
1003 rc = NO_MEMORY;
1004 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
1005 return NO_MEMORY;
1006 }
1007
1008 //Map memory for related cam sync buffer
1009 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001010 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
1011 m_pDualCamCmdHeap->getFd(0),
1012 sizeof(cam_dual_camera_cmd_info_t),
1013 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -07001014 if(rc < 0) {
1015 LOGE("Dualcam: failed to map Related cam sync buffer");
1016 rc = FAILED_TRANSACTION;
1017 return NO_MEMORY;
1018 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001019 m_pDualCamCmdPtr =
1020 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -07001021 }
1022
1023 LOGH("mCameraId=%d",mCameraId);
1024
1025 return NO_ERROR;
1026}
1027
1028/*===========================================================================
1029 * FUNCTION : closeCamera
1030 *
1031 * DESCRIPTION: close camera
1032 *
1033 * PARAMETERS : none
1034 *
1035 * RETURN : int32_t type of status
1036 * NO_ERROR -- success
1037 * none-zero failure code
1038 *==========================================================================*/
1039int QCamera3HardwareInterface::closeCamera()
1040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001041 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -07001042 int rc = NO_ERROR;
1043 char value[PROPERTY_VALUE_MAX];
1044
1045 LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
1046 mCameraId);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001047
1048 // unmap memory for related cam sync buffer
1049 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001050 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001051 if (NULL != m_pDualCamCmdHeap) {
1052 m_pDualCamCmdHeap->deallocate();
1053 delete m_pDualCamCmdHeap;
1054 m_pDualCamCmdHeap = NULL;
1055 m_pDualCamCmdPtr = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001056 }
1057
Thierry Strudel3d639192016-09-09 11:52:26 -07001058 rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
1059 mCameraHandle = NULL;
1060
1061 //reset session id to some invalid id
1062 pthread_mutex_lock(&gCamLock);
1063 sessionId[mCameraId] = 0xDEADBEEF;
1064 pthread_mutex_unlock(&gCamLock);
1065
1066 //Notify display HAL that there is no active camera session
1067 //but avoid calling the same during bootup. Refer to openCamera
1068 //for more details.
1069 property_get("service.bootanim.exit", value, "0");
1070 if (atoi(value) == 1) {
1071 pthread_mutex_lock(&gCamLock);
1072 if (--gNumCameraSessions == 0) {
1073 setCameraLaunchStatus(false);
1074 }
1075 pthread_mutex_unlock(&gCamLock);
1076 }
1077
Thierry Strudel3d639192016-09-09 11:52:26 -07001078 if (mExifParams.debug_params) {
1079 free(mExifParams.debug_params);
1080 mExifParams.debug_params = NULL;
1081 }
1082 if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
1083 LOGW("Failed to release flash for camera id: %d",
1084 mCameraId);
1085 }
1086 mState = CLOSED;
1087 LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
1088 mCameraId, rc);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001089
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001090 {
1091 Mutex::Autolock l(gHdrPlusClientLock);
1092 if (gHdrPlusClient != nullptr) {
1093 // Disable HDR+ mode.
1094 disableHdrPlusModeLocked();
1095 // Disconnect Easel if it's connected.
1096 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
1097 gHdrPlusClient = nullptr;
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001098 }
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -07001099
Chien-Yu Chen5abecb52017-04-06 11:25:21 -07001100 if (EaselManagerClientOpened) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07001101 rc = gEaselManagerClient.stopMipi(mCameraId);
1102 if (rc != 0) {
1103 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1104 }
1105
1106 rc = gEaselManagerClient.suspend();
1107 if (rc != 0) {
1108 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
1109 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08001110 }
1111 }
1112
Thierry Strudel3d639192016-09-09 11:52:26 -07001113 return rc;
1114}
1115
1116/*===========================================================================
1117 * FUNCTION : initialize
1118 *
1119 * DESCRIPTION: Initialize frameworks callback functions
1120 *
1121 * PARAMETERS :
1122 * @callback_ops : callback function to frameworks
1123 *
1124 * RETURN :
1125 *
1126 *==========================================================================*/
1127int QCamera3HardwareInterface::initialize(
1128 const struct camera3_callback_ops *callback_ops)
1129{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001130 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -07001131 int rc;
1132
1133 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
1134 pthread_mutex_lock(&mMutex);
1135
1136 // Validate current state
1137 switch (mState) {
1138 case OPENED:
1139 /* valid state */
1140 break;
1141 default:
1142 LOGE("Invalid state %d", mState);
1143 rc = -ENODEV;
1144 goto err1;
1145 }
1146
1147 rc = initParameters();
1148 if (rc < 0) {
1149 LOGE("initParamters failed %d", rc);
1150 goto err1;
1151 }
1152 mCallbackOps = callback_ops;
1153
1154 mChannelHandle = mCameraHandle->ops->add_channel(
1155 mCameraHandle->camera_handle, NULL, NULL, this);
1156 if (mChannelHandle == 0) {
1157 LOGE("add_channel failed");
1158 rc = -ENOMEM;
1159 pthread_mutex_unlock(&mMutex);
1160 return rc;
1161 }
1162
1163 pthread_mutex_unlock(&mMutex);
1164 mCameraInitialized = true;
1165 mState = INITIALIZED;
1166 LOGI("X");
1167 return 0;
1168
1169err1:
1170 pthread_mutex_unlock(&mMutex);
1171 return rc;
1172}
1173
1174/*===========================================================================
1175 * FUNCTION : validateStreamDimensions
1176 *
1177 * DESCRIPTION: Check if the configuration requested are those advertised
1178 *
1179 * PARAMETERS :
1180 * @stream_list : streams to be configured
1181 *
1182 * RETURN :
1183 *
1184 *==========================================================================*/
1185int QCamera3HardwareInterface::validateStreamDimensions(
1186 camera3_stream_configuration_t *streamList)
1187{
1188 int rc = NO_ERROR;
1189 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001190 uint32_t depthWidth = 0;
1191 uint32_t depthHeight = 0;
1192 if (mPDSupported) {
1193 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1194 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1195 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001196
1197 camera3_stream_t *inputStream = NULL;
1198 /*
1199 * Loop through all streams to find input stream if it exists*
1200 */
1201 for (size_t i = 0; i< streamList->num_streams; i++) {
1202 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1203 if (inputStream != NULL) {
1204 LOGE("Error, Multiple input streams requested");
1205 return -EINVAL;
1206 }
1207 inputStream = streamList->streams[i];
1208 }
1209 }
1210 /*
1211 * Loop through all streams requested in configuration
1212 * Check if unsupported sizes have been requested on any of them
1213 */
1214 for (size_t j = 0; j < streamList->num_streams; j++) {
1215 bool sizeFound = false;
1216 camera3_stream_t *newStream = streamList->streams[j];
1217
1218 uint32_t rotatedHeight = newStream->height;
1219 uint32_t rotatedWidth = newStream->width;
1220 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1221 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1222 rotatedHeight = newStream->width;
1223 rotatedWidth = newStream->height;
1224 }
1225
1226 /*
1227 * Sizes are different for each type of stream format check against
1228 * appropriate table.
1229 */
1230 switch (newStream->format) {
1231 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1232 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1233 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001234 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1235 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1236 mPDSupported) {
1237 if ((depthWidth == newStream->width) &&
1238 (depthHeight == newStream->height)) {
1239 sizeFound = true;
1240 }
1241 break;
1242 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001243 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1244 for (size_t i = 0; i < count; i++) {
1245 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1246 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1247 sizeFound = true;
1248 break;
1249 }
1250 }
1251 break;
1252 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001253 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1254 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001255 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001256 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001257 if ((depthSamplesCount == newStream->width) &&
1258 (1 == newStream->height)) {
1259 sizeFound = true;
1260 }
1261 break;
1262 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001263 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1264 /* Verify set size against generated sizes table */
1265 for (size_t i = 0; i < count; i++) {
1266 if (((int32_t)rotatedWidth ==
1267 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1268 ((int32_t)rotatedHeight ==
1269 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1270 sizeFound = true;
1271 break;
1272 }
1273 }
1274 break;
1275 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1276 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1277 default:
1278 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1279 || newStream->stream_type == CAMERA3_STREAM_INPUT
1280 || IS_USAGE_ZSL(newStream->usage)) {
1281 if (((int32_t)rotatedWidth ==
1282 gCamCapability[mCameraId]->active_array_size.width) &&
1283 ((int32_t)rotatedHeight ==
1284 gCamCapability[mCameraId]->active_array_size.height)) {
1285 sizeFound = true;
1286 break;
1287 }
1288 /* We could potentially break here to enforce ZSL stream
1289 * set from frameworks always is full active array size
1290 * but it is not clear from the spc if framework will always
1291 * follow that, also we have logic to override to full array
1292 * size, so keeping the logic lenient at the moment
1293 */
1294 }
1295 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1296 MAX_SIZES_CNT);
1297 for (size_t i = 0; i < count; i++) {
1298 if (((int32_t)rotatedWidth ==
1299 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1300 ((int32_t)rotatedHeight ==
1301 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1302 sizeFound = true;
1303 break;
1304 }
1305 }
1306 break;
1307 } /* End of switch(newStream->format) */
1308
1309 /* We error out even if a single stream has unsupported size set */
1310 if (!sizeFound) {
1311 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1312 rotatedWidth, rotatedHeight, newStream->format,
1313 gCamCapability[mCameraId]->active_array_size.width,
1314 gCamCapability[mCameraId]->active_array_size.height);
1315 rc = -EINVAL;
1316 break;
1317 }
1318 } /* End of for each stream */
1319 return rc;
1320}
1321
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001322/*===========================================================================
1323 * FUNCTION : validateUsageFlags
1324 *
1325 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1326 *
1327 * PARAMETERS :
1328 * @stream_list : streams to be configured
1329 *
1330 * RETURN :
1331 * NO_ERROR if the usage flags are supported
1332 * error code if usage flags are not supported
1333 *
1334 *==========================================================================*/
1335int QCamera3HardwareInterface::validateUsageFlags(
1336 const camera3_stream_configuration_t* streamList)
1337{
1338 for (size_t j = 0; j < streamList->num_streams; j++) {
1339 const camera3_stream_t *newStream = streamList->streams[j];
1340
1341 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1342 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1343 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1344 continue;
1345 }
1346
Jason Leec4cf5032017-05-24 18:31:41 -07001347 // Here we only care whether it's EIS3 or not
1348 char is_type_value[PROPERTY_VALUE_MAX];
1349 property_get("persist.camera.is_type", is_type_value, "4");
1350 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1351 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1352 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1353 isType = IS_TYPE_NONE;
1354
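        // Note: isType is forwarded to getStreamDefaultFormat() below because the
        // default format chosen for a stream may differ when EIS 3.0 is in use.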
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001355 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1356 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1357 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1358 bool forcePreviewUBWC = true;
1359 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1360 forcePreviewUBWC = false;
1361 }
1362 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001363 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001364 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001365 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001366 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001367 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001368
1369 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1370 // So color spaces will always match.
1371
1372 // Check whether underlying formats of shared streams match.
1373 if (isVideo && isPreview && videoFormat != previewFormat) {
1374 LOGE("Combined video and preview usage flag is not supported");
1375 return -EINVAL;
1376 }
1377 if (isPreview && isZSL && previewFormat != zslFormat) {
1378 LOGE("Combined preview and zsl usage flag is not supported");
1379 return -EINVAL;
1380 }
1381 if (isVideo && isZSL && videoFormat != zslFormat) {
1382 LOGE("Combined video and zsl usage flag is not supported");
1383 return -EINVAL;
1384 }
1385 }
1386 return NO_ERROR;
1387}
1388
1389/*===========================================================================
1390 * FUNCTION : validateUsageFlagsForEis
1391 *
1392 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1393 *
1394 * PARAMETERS :
1395 * @stream_list : streams to be configured
1396 *
1397 * RETURN :
1398 * NO_ERROR if the usage flags are supported
1399 * error code if usage flags are not supported
1400 *
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::validateUsageFlagsForEis(
1403 const camera3_stream_configuration_t* streamList)
1404{
1405 for (size_t j = 0; j < streamList->num_streams; j++) {
1406 const camera3_stream_t *newStream = streamList->streams[j];
1407
1408 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1409 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1410
1411        // Because EIS is "hard-coded" for certain use cases, and the current
1412        // implementation doesn't support sharing preview and video on the same
1413        // stream, return failure if EIS is forced on.
1414 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1415 LOGE("Combined video and preview usage flag is not supported due to EIS");
1416 return -EINVAL;
1417 }
1418 }
1419 return NO_ERROR;
1420}
1421
Thierry Strudel3d639192016-09-09 11:52:26 -07001422/*==============================================================================
1423 * FUNCTION : isSupportChannelNeeded
1424 *
1425 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1426 *
1427 * PARAMETERS :
1428 * @stream_list : streams to be configured
1429 * @stream_config_info : the config info for streams to be configured
1430 *
1431 * RETURN     : Boolean true/false decision
1432 *
1433 *==========================================================================*/
1434bool QCamera3HardwareInterface::isSupportChannelNeeded(
1435 camera3_stream_configuration_t *streamList,
1436 cam_stream_size_info_t stream_config_info)
1437{
1438 uint32_t i;
1439 bool pprocRequested = false;
1440 /* Check for conditions where PProc pipeline does not have any streams*/
1441 for (i = 0; i < stream_config_info.num_streams; i++) {
1442 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1443 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1444 pprocRequested = true;
1445 break;
1446 }
1447 }
1448
1449 if (pprocRequested == false )
1450 return true;
1451
1452 /* Dummy stream needed if only raw or jpeg streams present */
1453 for (i = 0; i < streamList->num_streams; i++) {
1454 switch(streamList->streams[i]->format) {
1455 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1456 case HAL_PIXEL_FORMAT_RAW10:
1457 case HAL_PIXEL_FORMAT_RAW16:
1458 case HAL_PIXEL_FORMAT_BLOB:
1459 break;
1460 default:
1461 return false;
1462 }
1463 }
1464 return true;
1465}
1466
1467/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001468 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001469 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
1472 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001473 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001474 *
1475 * RETURN : int32_t type of status
1476 * NO_ERROR -- success
1477 *              non-zero failure code
1478 *
1479 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001480int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001481{
1482 int32_t rc = NO_ERROR;
1483
1484 cam_dimension_t max_dim = {0, 0};
1485 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1486 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1487 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1488 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1489 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1490 }
1491
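    // Send the largest configured stream dimensions to the backend, which is
    // expected to select a sensor mode large enough to cover every output;
    // the resulting mode is then queried via CAM_INTF_PARM_SENSOR_MODE_INFO.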
1492 clear_metadata_buffer(mParameters);
1493
1494 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1495 max_dim);
1496 if (rc != NO_ERROR) {
1497 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1498 return rc;
1499 }
1500
1501 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1502 if (rc != NO_ERROR) {
1503 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1504 return rc;
1505 }
1506
1507 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001508 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001509
1510 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1511 mParameters);
1512 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001513 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 return rc;
1515 }
1516
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001517 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001518 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1519 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1520 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1521 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1522 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001523
1524 return rc;
1525}
1526
1527/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001528 * FUNCTION : getCurrentSensorModeInfo
1529 *
1530 * DESCRIPTION: Get sensor mode information that is currently selected.
1531 *
1532 * PARAMETERS :
1533 * @sensorModeInfo : sensor mode information (output)
1534 *
1535 * RETURN : int32_t type of status
1536 * NO_ERROR -- success
1537 *              non-zero failure code
1538 *
1539 *==========================================================================*/
1540int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1541{
1542 int32_t rc = NO_ERROR;
1543
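    // Unlike getSensorModeInfo(), no max-dimension hint is pushed here; we
    // simply query the mode the sensor is currently running with.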
1544 clear_metadata_buffer(mParameters);
1545 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1546
1547 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1548 mParameters);
1549 if (rc != NO_ERROR) {
1550        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1551 return rc;
1552 }
1553
1554 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1555 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1556 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1557 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1558 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1559 sensorModeInfo.num_raw_bits);
1560
1561 return rc;
1562}
1563
1564/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001565 * FUNCTION : addToPPFeatureMask
1566 *
1567 * DESCRIPTION: add additional features to pp feature mask based on
1568 * stream type and usecase
1569 *
1570 * PARAMETERS :
1571 * @stream_format : stream type for feature mask
1572 * @stream_idx : stream idx within postprocess_mask list to change
1573 *
1574 * RETURN : NULL
1575 *
1576 *==========================================================================*/
1577void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1578 uint32_t stream_idx)
1579{
1580 char feature_mask_value[PROPERTY_VALUE_MAX];
1581 cam_feature_mask_t feature_mask;
1582 int args_converted;
1583 int property_len;
1584
1585 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001586#ifdef _LE_CAMERA_
1587 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1588 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1589 property_len = property_get("persist.camera.hal3.feature",
1590 feature_mask_value, swtnr_feature_mask_value);
1591#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001592 property_len = property_get("persist.camera.hal3.feature",
1593 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001594#endif
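    // The mask may be given either as a hex string (e.g. "0x10") or a decimal
    // string (e.g. "16"); these example values are illustrative only.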
Thierry Strudel3d639192016-09-09 11:52:26 -07001595 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1596 (feature_mask_value[1] == 'x')) {
1597 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1598 } else {
1599 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1600 }
1601 if (1 != args_converted) {
1602 feature_mask = 0;
1603 LOGE("Wrong feature mask %s", feature_mask_value);
1604 return;
1605 }
1606
1607 switch (stream_format) {
1608 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1609        /* Add SW TNR / LLVD SeeMore to pp feature mask only if video hint is enabled */
1610 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1611 mStreamConfigInfo.postprocess_mask[stream_idx]
1612 |= CAM_QTI_FEATURE_SW_TNR;
1613 LOGH("Added SW TNR to pp feature mask");
1614 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1615 mStreamConfigInfo.postprocess_mask[stream_idx]
1616 |= CAM_QCOM_FEATURE_LLVD;
1617 LOGH("Added LLVD SeeMore to pp feature mask");
1618 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001619 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1620 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1621 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1622 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001623 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1624 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1625 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1626 CAM_QTI_FEATURE_BINNING_CORRECTION;
1627 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001628 break;
1629 }
1630 default:
1631 break;
1632 }
1633 LOGD("PP feature mask %llx",
1634 mStreamConfigInfo.postprocess_mask[stream_idx]);
1635}
1636
1637/*==============================================================================
1638 * FUNCTION : updateFpsInPreviewBuffer
1639 *
1640 * DESCRIPTION: update FPS information in preview buffer.
1641 *
1642 * PARAMETERS :
1643 * @metadata : pointer to metadata buffer
1644 * @frame_number: frame_number to look for in pending buffer list
1645 *
1646 * RETURN : None
1647 *
1648 *==========================================================================*/
1649void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1650 uint32_t frame_number)
1651{
1652 // Mark all pending buffers for this particular request
1653 // with corresponding framerate information
1654 for (List<PendingBuffersInRequest>::iterator req =
1655 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1656 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1657 for(List<PendingBufferInfo>::iterator j =
1658 req->mPendingBufferList.begin();
1659 j != req->mPendingBufferList.end(); j++) {
1660 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1661 if ((req->frame_number == frame_number) &&
1662 (channel->getStreamTypeMask() &
1663 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1664 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1665 CAM_INTF_PARM_FPS_RANGE, metadata) {
1666 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1667 struct private_handle_t *priv_handle =
1668 (struct private_handle_t *)(*(j->buffer));
1669 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1670 }
1671 }
1672 }
1673 }
1674}
1675
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001676/*==============================================================================
1677 * FUNCTION : updateTimeStampInPendingBuffers
1678 *
1679 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1680 * of a frame number
1681 *
1682 * PARAMETERS :
1683 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1684 * @timestamp : timestamp to be set
1685 *
1686 * RETURN : None
1687 *
1688 *==========================================================================*/
1689void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1690 uint32_t frameNumber, nsecs_t timestamp)
1691{
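    // Store the timestamp in the display metadata of every pending buffer of
    // this frame (SET_VT_TIMESTAMP); downstream consumers, presumably video
    // telephony style clients, read it from there.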
1692 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1693 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1694 if (req->frame_number != frameNumber)
1695 continue;
1696
1697 for (auto k = req->mPendingBufferList.begin();
1698 k != req->mPendingBufferList.end(); k++ ) {
1699 struct private_handle_t *priv_handle =
1700 (struct private_handle_t *) (*(k->buffer));
1701 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1702 }
1703 }
1704 return;
1705}
1706
Thierry Strudel3d639192016-09-09 11:52:26 -07001707/*===========================================================================
1708 * FUNCTION : configureStreams
1709 *
1710 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1711 * and output streams.
1712 *
1713 * PARAMETERS :
1714 * @stream_list : streams to be configured
1715 *
1716 * RETURN :
1717 *
1718 *==========================================================================*/
1719int QCamera3HardwareInterface::configureStreams(
1720 camera3_stream_configuration_t *streamList)
1721{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001722 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001723 int rc = 0;
1724
1725 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001726 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001727 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001728 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001729
1730 return rc;
1731}
1732
1733/*===========================================================================
1734 * FUNCTION : configureStreamsPerfLocked
1735 *
1736 * DESCRIPTION: configureStreams while perfLock is held.
1737 *
1738 * PARAMETERS :
1739 * @stream_list : streams to be configured
1740 *
1741 * RETURN : int32_t type of status
1742 * NO_ERROR -- success
1743 *              non-zero failure code
1744 *==========================================================================*/
1745int QCamera3HardwareInterface::configureStreamsPerfLocked(
1746 camera3_stream_configuration_t *streamList)
1747{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001748 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001749 int rc = 0;
1750
1751 // Sanity check stream_list
1752 if (streamList == NULL) {
1753 LOGE("NULL stream configuration");
1754 return BAD_VALUE;
1755 }
1756 if (streamList->streams == NULL) {
1757 LOGE("NULL stream list");
1758 return BAD_VALUE;
1759 }
1760
1761 if (streamList->num_streams < 1) {
1762 LOGE("Bad number of streams requested: %d",
1763 streamList->num_streams);
1764 return BAD_VALUE;
1765 }
1766
1767 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1768 LOGE("Maximum number of streams %d exceeded: %d",
1769 MAX_NUM_STREAMS, streamList->num_streams);
1770 return BAD_VALUE;
1771 }
1772
Jason Leec4cf5032017-05-24 18:31:41 -07001773 mOpMode = streamList->operation_mode;
1774 LOGD("mOpMode: %d", mOpMode);
1775
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001776 rc = validateUsageFlags(streamList);
1777 if (rc != NO_ERROR) {
1778 return rc;
1779 }
1780
Thierry Strudel3d639192016-09-09 11:52:26 -07001781    /* First invalidate all the streams in mStreamInfo;
1782     * if they appear again, they will be validated */
1783 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1784 it != mStreamInfo.end(); it++) {
1785 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1786 if (channel) {
1787 channel->stop();
1788 }
1789 (*it)->status = INVALID;
1790 }
1791
1792 if (mRawDumpChannel) {
1793 mRawDumpChannel->stop();
1794 delete mRawDumpChannel;
1795 mRawDumpChannel = NULL;
1796 }
1797
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001798 if (mHdrPlusRawSrcChannel) {
1799 mHdrPlusRawSrcChannel->stop();
1800 delete mHdrPlusRawSrcChannel;
1801 mHdrPlusRawSrcChannel = NULL;
1802 }
1803
Thierry Strudel3d639192016-09-09 11:52:26 -07001804 if (mSupportChannel)
1805 mSupportChannel->stop();
1806
1807 if (mAnalysisChannel) {
1808 mAnalysisChannel->stop();
1809 }
1810 if (mMetadataChannel) {
1811 /* If content of mStreamInfo is not 0, there is metadata stream */
1812 mMetadataChannel->stop();
1813 }
1814 if (mChannelHandle) {
1815 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1816 mChannelHandle);
1817 LOGD("stopping channel %d", mChannelHandle);
1818 }
1819
1820 pthread_mutex_lock(&mMutex);
1821
1822 // Check state
1823 switch (mState) {
1824 case INITIALIZED:
1825 case CONFIGURED:
1826 case STARTED:
1827 /* valid state */
1828 break;
1829 default:
1830 LOGE("Invalid state %d", mState);
1831 pthread_mutex_unlock(&mMutex);
1832 return -ENODEV;
1833 }
1834
1835 /* Check whether we have video stream */
1836 m_bIs4KVideo = false;
1837 m_bIsVideo = false;
1838 m_bEisSupportedSize = false;
1839 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001840 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001842 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001843 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001844 uint32_t videoWidth = 0U;
1845 uint32_t videoHeight = 0U;
1846 size_t rawStreamCnt = 0;
1847 size_t stallStreamCnt = 0;
1848 size_t processedStreamCnt = 0;
1849 // Number of streams on ISP encoder path
1850 size_t numStreamsOnEncoder = 0;
1851 size_t numYuv888OnEncoder = 0;
1852 bool bYuv888OverrideJpeg = false;
1853 cam_dimension_t largeYuv888Size = {0, 0};
1854 cam_dimension_t maxViewfinderSize = {0, 0};
1855 bool bJpegExceeds4K = false;
1856 bool bJpegOnEncoder = false;
1857 bool bUseCommonFeatureMask = false;
1858 cam_feature_mask_t commonFeatureMask = 0;
1859 bool bSmallJpegSize = false;
1860 uint32_t width_ratio;
1861 uint32_t height_ratio;
1862 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1863 camera3_stream_t *inputStream = NULL;
1864 bool isJpeg = false;
1865 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001866 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001867 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001868
1869 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1870
1871 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001872 uint8_t eis_prop_set;
1873 uint32_t maxEisWidth = 0;
1874 uint32_t maxEisHeight = 0;
1875
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001876 // Initialize all instant AEC related variables
1877 mInstantAEC = false;
1878 mResetInstantAEC = false;
1879 mInstantAECSettledFrameNumber = 0;
1880 mAecSkipDisplayFrameBound = 0;
1881 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001882 mCurrFeatureState = 0;
1883 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001884
Thierry Strudel3d639192016-09-09 11:52:26 -07001885 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1886
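    // Determine whether this sensor advertises EIS 2.0 or EIS 3.0 support.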
1887 size_t count = IS_TYPE_MAX;
1888 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1889 for (size_t i = 0; i < count; i++) {
1890 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001891 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1892 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001893 break;
1894 }
1895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001896
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001897 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001898 maxEisWidth = MAX_EIS_WIDTH;
1899 maxEisHeight = MAX_EIS_HEIGHT;
1900 }
1901
1902 /* EIS setprop control */
1903 char eis_prop[PROPERTY_VALUE_MAX];
1904 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001905 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001906 eis_prop_set = (uint8_t)atoi(eis_prop);
1907
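    // EIS is enabled only when the sensor supports it, the setprop has not
    // disabled it, and we are not in constrained high speed mode; it is
    // further disabled below for front cameras and non-video sessions.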
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001908 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001909 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1910
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001911 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1912 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001913
Thierry Strudel3d639192016-09-09 11:52:26 -07001914 /* stream configurations */
1915 for (size_t i = 0; i < streamList->num_streams; i++) {
1916 camera3_stream_t *newStream = streamList->streams[i];
1917 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1918 "height = %d, rotation = %d, usage = 0x%x",
1919 i, newStream->stream_type, newStream->format,
1920 newStream->width, newStream->height, newStream->rotation,
1921 newStream->usage);
1922 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1923 newStream->stream_type == CAMERA3_STREAM_INPUT){
1924 isZsl = true;
1925 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001926 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1927 IS_USAGE_PREVIEW(newStream->usage)) {
1928 isPreview = true;
1929 }
1930
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1932 inputStream = newStream;
1933 }
1934
Emilian Peev7650c122017-01-19 08:24:33 -08001935 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1936 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001937 isJpeg = true;
1938 jpegSize.width = newStream->width;
1939 jpegSize.height = newStream->height;
1940 if (newStream->width > VIDEO_4K_WIDTH ||
1941 newStream->height > VIDEO_4K_HEIGHT)
1942 bJpegExceeds4K = true;
1943 }
1944
1945 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1946 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1947 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001948 // In HAL3 we can have multiple different video streams.
1949 // The variables video width and height are used below as
1950 // dimensions of the biggest of them
1951 if (videoWidth < newStream->width ||
1952 videoHeight < newStream->height) {
1953 videoWidth = newStream->width;
1954 videoHeight = newStream->height;
1955 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001956 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1957 (VIDEO_4K_HEIGHT <= newStream->height)) {
1958 m_bIs4KVideo = true;
1959 }
1960 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1961 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001962
Thierry Strudel3d639192016-09-09 11:52:26 -07001963 }
1964 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1965 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1966 switch (newStream->format) {
1967 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001968 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1969 depthPresent = true;
1970 break;
1971 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001972 stallStreamCnt++;
1973 if (isOnEncoder(maxViewfinderSize, newStream->width,
1974 newStream->height)) {
1975 numStreamsOnEncoder++;
1976 bJpegOnEncoder = true;
1977 }
1978 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1979 newStream->width);
1980 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1981                    newStream->height);
1982 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1983 "FATAL: max_downscale_factor cannot be zero and so assert");
1984 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1985 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1986 LOGH("Setting small jpeg size flag to true");
1987 bSmallJpegSize = true;
1988 }
1989 break;
1990 case HAL_PIXEL_FORMAT_RAW10:
1991 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1992 case HAL_PIXEL_FORMAT_RAW16:
1993 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001994 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1995 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1996 pdStatCount++;
1997 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001998 break;
1999 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2000 processedStreamCnt++;
2001 if (isOnEncoder(maxViewfinderSize, newStream->width,
2002 newStream->height)) {
2003 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2004 !IS_USAGE_ZSL(newStream->usage)) {
2005 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2006 }
2007 numStreamsOnEncoder++;
2008 }
2009 break;
2010 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2011 processedStreamCnt++;
2012 if (isOnEncoder(maxViewfinderSize, newStream->width,
2013 newStream->height)) {
2014 // If Yuv888 size is not greater than 4K, set feature mask
2015 // to SUPERSET so that it support concurrent request on
2016 // YUV and JPEG.
2017 if (newStream->width <= VIDEO_4K_WIDTH &&
2018 newStream->height <= VIDEO_4K_HEIGHT) {
2019 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2020 }
2021 numStreamsOnEncoder++;
2022 numYuv888OnEncoder++;
2023 largeYuv888Size.width = newStream->width;
2024 largeYuv888Size.height = newStream->height;
2025 }
2026 break;
2027 default:
2028 processedStreamCnt++;
2029 if (isOnEncoder(maxViewfinderSize, newStream->width,
2030 newStream->height)) {
2031 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2032 numStreamsOnEncoder++;
2033 }
2034 break;
2035 }
2036
2037 }
2038 }
2039
2040 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2041 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2042 !m_bIsVideo) {
2043 m_bEisEnable = false;
2044 }
2045
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002046 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2047 pthread_mutex_unlock(&mMutex);
2048 return -EINVAL;
2049 }
2050
Thierry Strudel54dc9782017-02-15 12:12:10 -08002051 uint8_t forceEnableTnr = 0;
2052 char tnr_prop[PROPERTY_VALUE_MAX];
2053 memset(tnr_prop, 0, sizeof(tnr_prop));
2054 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2055 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2056
Thierry Strudel3d639192016-09-09 11:52:26 -07002057 /* Logic to enable/disable TNR based on specific config size/etc.*/
2058 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002059 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2060 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002061 else if (forceEnableTnr)
2062 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002063
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002064 char videoHdrProp[PROPERTY_VALUE_MAX];
2065 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2066 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2067 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2068
2069 if (hdr_mode_prop == 1 && m_bIsVideo &&
2070 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2071 m_bVideoHdrEnabled = true;
2072 else
2073 m_bVideoHdrEnabled = false;
2074
2075
Thierry Strudel3d639192016-09-09 11:52:26 -07002076 /* Check if num_streams is sane */
2077 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2078 rawStreamCnt > MAX_RAW_STREAMS ||
2079 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2080        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2081 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2082 pthread_mutex_unlock(&mMutex);
2083 return -EINVAL;
2084 }
2085 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002086 if (isZsl && m_bIs4KVideo) {
2087 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002088 pthread_mutex_unlock(&mMutex);
2089 return -EINVAL;
2090 }
2091 /* Check if stream sizes are sane */
2092 if (numStreamsOnEncoder > 2) {
2093 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2094 pthread_mutex_unlock(&mMutex);
2095 return -EINVAL;
2096 } else if (1 < numStreamsOnEncoder){
2097 bUseCommonFeatureMask = true;
2098 LOGH("Multiple streams above max viewfinder size, common mask needed");
2099 }
2100
2101 /* Check if BLOB size is greater than 4k in 4k recording case */
2102 if (m_bIs4KVideo && bJpegExceeds4K) {
2103 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2104 pthread_mutex_unlock(&mMutex);
2105 return -EINVAL;
2106 }
2107
Emilian Peev7650c122017-01-19 08:24:33 -08002108 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2109 depthPresent) {
2110 LOGE("HAL doesn't support depth streams in HFR mode!");
2111 pthread_mutex_unlock(&mMutex);
2112 return -EINVAL;
2113 }
2114
Thierry Strudel3d639192016-09-09 11:52:26 -07002115 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2116 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2117 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2118 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2119 // configurations:
2120 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2121 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2122 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2123 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2124 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2125 __func__);
2126 pthread_mutex_unlock(&mMutex);
2127 return -EINVAL;
2128 }
2129
2130 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2131    // the YUV stream's size is larger than the JPEG size, set common
2132 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2133 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2134 jpegSize.width, jpegSize.height) &&
2135 largeYuv888Size.width > jpegSize.width &&
2136 largeYuv888Size.height > jpegSize.height) {
2137 bYuv888OverrideJpeg = true;
2138 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2139 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2140 }
2141
2142 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2143 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2144 commonFeatureMask);
2145 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2146 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2147
2148 rc = validateStreamDimensions(streamList);
2149 if (rc == NO_ERROR) {
2150 rc = validateStreamRotations(streamList);
2151 }
2152 if (rc != NO_ERROR) {
2153 LOGE("Invalid stream configuration requested!");
2154 pthread_mutex_unlock(&mMutex);
2155 return rc;
2156 }
2157
Emilian Peev0f3c3162017-03-15 12:57:46 +00002158 if (1 < pdStatCount) {
2159 LOGE("HAL doesn't support multiple PD streams");
2160 pthread_mutex_unlock(&mMutex);
2161 return -EINVAL;
2162 }
2163
2164 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2165 (1 == pdStatCount)) {
2166 LOGE("HAL doesn't support PD streams in HFR mode!");
2167 pthread_mutex_unlock(&mMutex);
2168 return -EINVAL;
2169 }
2170
Thierry Strudel3d639192016-09-09 11:52:26 -07002171 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2172 for (size_t i = 0; i < streamList->num_streams; i++) {
2173 camera3_stream_t *newStream = streamList->streams[i];
2174 LOGH("newStream type = %d, stream format = %d "
2175 "stream size : %d x %d, stream rotation = %d",
2176 newStream->stream_type, newStream->format,
2177 newStream->width, newStream->height, newStream->rotation);
2178 //if the stream is in the mStreamList validate it
2179 bool stream_exists = false;
2180 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2181 it != mStreamInfo.end(); it++) {
2182 if ((*it)->stream == newStream) {
2183 QCamera3ProcessingChannel *channel =
2184 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2185 stream_exists = true;
2186 if (channel)
2187 delete channel;
2188 (*it)->status = VALID;
2189 (*it)->stream->priv = NULL;
2190 (*it)->channel = NULL;
2191 }
2192 }
2193 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2194 //new stream
2195 stream_info_t* stream_info;
2196 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2197 if (!stream_info) {
2198 LOGE("Could not allocate stream info");
2199 rc = -ENOMEM;
2200 pthread_mutex_unlock(&mMutex);
2201 return rc;
2202 }
2203 stream_info->stream = newStream;
2204 stream_info->status = VALID;
2205 stream_info->channel = NULL;
2206 mStreamInfo.push_back(stream_info);
2207 }
2208 /* Covers Opaque ZSL and API1 F/W ZSL */
2209 if (IS_USAGE_ZSL(newStream->usage)
2210 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2211 if (zslStream != NULL) {
2212 LOGE("Multiple input/reprocess streams requested!");
2213 pthread_mutex_unlock(&mMutex);
2214 return BAD_VALUE;
2215 }
2216 zslStream = newStream;
2217 }
2218 /* Covers YUV reprocess */
2219 if (inputStream != NULL) {
2220 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2221 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2222 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2223 && inputStream->width == newStream->width
2224 && inputStream->height == newStream->height) {
2225 if (zslStream != NULL) {
2226                /* This scenario indicates that multiple YUV streams with the
2227                 * same size as the input stream have been requested. Since the
2228                 * zsl stream handle is solely used to override the size of
2229                 * streams which share h/w streams, we just make a guess here
2230                 * as to which stream is the ZSL stream. This will be refactored
2231                 * once we have generic logic for streams sharing encoder output.
2232                 */
2233                LOGH("Warning, multiple input/reprocess streams requested!");
2234 }
2235 zslStream = newStream;
2236 }
2237 }
2238 }
2239
2240 /* If a zsl stream is set, we know that we have configured at least one input or
2241 bidirectional stream */
2242 if (NULL != zslStream) {
2243 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2244 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2245 mInputStreamInfo.format = zslStream->format;
2246 mInputStreamInfo.usage = zslStream->usage;
2247 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2248 mInputStreamInfo.dim.width,
2249 mInputStreamInfo.dim.height,
2250 mInputStreamInfo.format, mInputStreamInfo.usage);
2251 }
2252
2253 cleanAndSortStreamInfo();
2254 if (mMetadataChannel) {
2255 delete mMetadataChannel;
2256 mMetadataChannel = NULL;
2257 }
2258 if (mSupportChannel) {
2259 delete mSupportChannel;
2260 mSupportChannel = NULL;
2261 }
2262
2263 if (mAnalysisChannel) {
2264 delete mAnalysisChannel;
2265 mAnalysisChannel = NULL;
2266 }
2267
2268 if (mDummyBatchChannel) {
2269 delete mDummyBatchChannel;
2270 mDummyBatchChannel = NULL;
2271 }
2272
Emilian Peev7650c122017-01-19 08:24:33 -08002273 if (mDepthChannel) {
2274 mDepthChannel = NULL;
2275 }
2276
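    // Drop any shutter / output-buffer dispatch state left over from a
    // previous stream configuration.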
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002277 mShutterDispatcher.clear();
2278 mOutputBufferDispatcher.clear();
2279
Thierry Strudel2896d122017-02-23 19:18:03 -08002280 char is_type_value[PROPERTY_VALUE_MAX];
2281 property_get("persist.camera.is_type", is_type_value, "4");
2282 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2283
Binhao Line406f062017-05-03 14:39:44 -07002284 char property_value[PROPERTY_VALUE_MAX];
2285 property_get("persist.camera.gzoom.at", property_value, "0");
2286 int goog_zoom_at = atoi(property_value);
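    // persist.camera.gzoom.at is treated as a bit mask: bit 0 enables Google
    // zoom on the video stream and bit 1 on the preview stream; both are
    // additionally restricted to the back camera below.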
Jason Leec4cf5032017-05-24 18:31:41 -07002287 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2288 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2289 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2290 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002291
2292 property_get("persist.camera.gzoom.4k", property_value, "0");
2293 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2294
Thierry Strudel3d639192016-09-09 11:52:26 -07002295 //Create metadata channel and initialize it
2296 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2297 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2298 gCamCapability[mCameraId]->color_arrangement);
2299 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2300 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002301 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002302 if (mMetadataChannel == NULL) {
2303 LOGE("failed to allocate metadata channel");
2304 rc = -ENOMEM;
2305 pthread_mutex_unlock(&mMutex);
2306 return rc;
2307 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002308 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002309 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2310 if (rc < 0) {
2311 LOGE("metadata channel initialization failed");
2312 delete mMetadataChannel;
2313 mMetadataChannel = NULL;
2314 pthread_mutex_unlock(&mMutex);
2315 return rc;
2316 }
2317
Thierry Strudel2896d122017-02-23 19:18:03 -08002318 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002319 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002320 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002321 // Keep track of preview/video streams indices.
2322 // There could be more than one preview streams, but only one video stream.
2323 int32_t video_stream_idx = -1;
2324 int32_t preview_stream_idx[streamList->num_streams];
2325 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002326 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2327 /* Allocate channel objects for the requested streams */
2328 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002329
Thierry Strudel3d639192016-09-09 11:52:26 -07002330 camera3_stream_t *newStream = streamList->streams[i];
2331 uint32_t stream_usage = newStream->usage;
2332 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2333 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2334 struct camera_info *p_info = NULL;
2335 pthread_mutex_lock(&gCamLock);
2336 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2337 pthread_mutex_unlock(&gCamLock);
2338 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2339 || IS_USAGE_ZSL(newStream->usage)) &&
2340 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002341 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002342 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
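            // Choose the ZSL post-processing mask based on whether this stream
            // (or any other stream) sits on the encoder path.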
Thierry Strudel2896d122017-02-23 19:18:03 -08002343 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2344 if (bUseCommonFeatureMask)
2345 zsl_ppmask = commonFeatureMask;
2346 else
2347 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002348 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002349 if (numStreamsOnEncoder > 0)
2350 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2351 else
2352 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002353 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002354 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002355 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002356 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002357 LOGH("Input stream configured, reprocess config");
2358 } else {
2359 //for non zsl streams find out the format
2360 switch (newStream->format) {
2361 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2362 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002363 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2365 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2366 /* add additional features to pp feature mask */
2367 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2368 mStreamConfigInfo.num_streams);
2369
2370 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2371 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2372 CAM_STREAM_TYPE_VIDEO;
2373 if (m_bTnrEnabled && m_bTnrVideo) {
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2375 CAM_QCOM_FEATURE_CPP_TNR;
2376 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2377 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2378 ~CAM_QCOM_FEATURE_CDS;
2379 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002380 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2382 CAM_QTI_FEATURE_PPEISCORE;
2383 }
Binhao Line406f062017-05-03 14:39:44 -07002384 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2386 CAM_QCOM_FEATURE_GOOG_ZOOM;
2387 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002388 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002389 } else {
2390 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2391 CAM_STREAM_TYPE_PREVIEW;
2392 if (m_bTnrEnabled && m_bTnrPreview) {
2393 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2394 CAM_QCOM_FEATURE_CPP_TNR;
2395 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2397 ~CAM_QCOM_FEATURE_CDS;
2398 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002399 if(!m_bSwTnrPreview) {
2400 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2401 ~CAM_QTI_FEATURE_SW_TNR;
2402 }
Binhao Line406f062017-05-03 14:39:44 -07002403 if (is_goog_zoom_preview_enabled) {
2404 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2405 CAM_QCOM_FEATURE_GOOG_ZOOM;
2406 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002407 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002408 padding_info.width_padding = mSurfaceStridePadding;
2409 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002410 previewSize.width = (int32_t)newStream->width;
2411 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002412 }
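                // For 90/270 degree stream rotation the backend stream is
                // configured with swapped width and height.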
2413 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2414 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2415 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2416 newStream->height;
2417 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2418 newStream->width;
2419 }
2420 }
2421 break;
2422 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002423 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002424 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2425 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2426 if (bUseCommonFeatureMask)
2427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2428 commonFeatureMask;
2429 else
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2431 CAM_QCOM_FEATURE_NONE;
2432 } else {
2433 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2434 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2435 }
2436 break;
2437 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002438 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002439 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2440 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2441 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2443 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002444 /* Remove rotation if it is not supported
2445 for 4K LiveVideo snapshot case (online processing) */
2446 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2447 CAM_QCOM_FEATURE_ROTATION)) {
2448 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2449 &= ~CAM_QCOM_FEATURE_ROTATION;
2450 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002451 } else {
2452 if (bUseCommonFeatureMask &&
2453 isOnEncoder(maxViewfinderSize, newStream->width,
2454 newStream->height)) {
2455 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2456 } else {
2457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2458 }
2459 }
2460 if (isZsl) {
2461 if (zslStream) {
2462 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2463 (int32_t)zslStream->width;
2464 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2465 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002466 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2467 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002468 } else {
2469 LOGE("Error, No ZSL stream identified");
2470 pthread_mutex_unlock(&mMutex);
2471 return -EINVAL;
2472 }
2473 } else if (m_bIs4KVideo) {
2474 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2475 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2476 } else if (bYuv888OverrideJpeg) {
2477 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2478 (int32_t)largeYuv888Size.width;
2479 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2480 (int32_t)largeYuv888Size.height;
2481 }
2482 break;
2483 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2484 case HAL_PIXEL_FORMAT_RAW16:
2485 case HAL_PIXEL_FORMAT_RAW10:
2486 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2487 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2488 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002489 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2490 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2491 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2492 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2493 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2494 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2495 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2496 gCamCapability[mCameraId]->dt[mPDIndex];
2497 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2498 gCamCapability[mCameraId]->vc[mPDIndex];
2499 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 break;
2501 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002502 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2505 break;
2506 }
2507 }
2508
2509 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2510 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2511 gCamCapability[mCameraId]->color_arrangement);
2512
2513 if (newStream->priv == NULL) {
2514 //New stream, construct channel
2515 switch (newStream->stream_type) {
2516 case CAMERA3_STREAM_INPUT:
2517 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2518 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2519 break;
2520 case CAMERA3_STREAM_BIDIRECTIONAL:
2521 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2522 GRALLOC_USAGE_HW_CAMERA_WRITE;
2523 break;
2524 case CAMERA3_STREAM_OUTPUT:
2525                /* For video encoding streams, set the read/write rarely
2526                 * flags so that the buffers may be allocated un-cached */
2527 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2528 newStream->usage |=
2529 (GRALLOC_USAGE_SW_READ_RARELY |
2530 GRALLOC_USAGE_SW_WRITE_RARELY |
2531 GRALLOC_USAGE_HW_CAMERA_WRITE);
2532 else if (IS_USAGE_ZSL(newStream->usage))
2533 {
2534 LOGD("ZSL usage flag skipping");
2535 }
2536 else if (newStream == zslStream
2537 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2538 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2539 } else
2540 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2541 break;
2542 default:
2543 LOGE("Invalid stream_type %d", newStream->stream_type);
2544 break;
2545 }
2546
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002547 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2549 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2550 QCamera3ProcessingChannel *channel = NULL;
2551 switch (newStream->format) {
2552 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2553 if ((newStream->usage &
2554 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2555 (streamList->operation_mode ==
2556 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2557 ) {
2558 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2559 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002560 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 this,
2562 newStream,
2563 (cam_stream_type_t)
2564 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2565 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2566 mMetadataChannel,
2567 0); //heap buffers are not required for HFR video channel
2568 if (channel == NULL) {
2569 LOGE("allocation of channel failed");
2570 pthread_mutex_unlock(&mMutex);
2571 return -ENOMEM;
2572 }
2573 //channel->getNumBuffers() will return 0 here so use
2574                        //MAX_INFLIGHT_HFR_REQUESTS
2575 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2576 newStream->priv = channel;
2577 LOGI("num video buffers in HFR mode: %d",
2578 MAX_INFLIGHT_HFR_REQUESTS);
2579 } else {
2580 /* Copy stream contents in HFR preview only case to create
2581 * dummy batch channel so that sensor streaming is in
2582 * HFR mode */
2583 if (!m_bIsVideo && (streamList->operation_mode ==
2584 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2585 mDummyBatchStream = *newStream;
2586 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002587 int bufferCount = MAX_INFLIGHT_REQUESTS;
2588 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2589 CAM_STREAM_TYPE_VIDEO) {
2590 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2591 bufferCount = MAX_VIDEO_BUFFERS;
2592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2594 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002595 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002596 this,
2597 newStream,
2598 (cam_stream_type_t)
2599 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2600 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2601 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002602 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002603 if (channel == NULL) {
2604 LOGE("allocation of channel failed");
2605 pthread_mutex_unlock(&mMutex);
2606 return -ENOMEM;
2607 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002608 /* disable UBWC for preview, though supported,
2609 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002610 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002611 (previewSize.width == (int32_t)videoWidth)&&
2612 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002613 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002614 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002615 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002616 /* When goog_zoom is linked to the preview or video stream,
2617                     * disable UBWC for the linked stream */
2618 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2619 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2620 channel->setUBWCEnabled(false);
2621 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002622 newStream->max_buffers = channel->getNumBuffers();
2623 newStream->priv = channel;
2624 }
2625 break;
2626 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2627 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2628 mChannelHandle,
2629 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002630 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 this,
2632 newStream,
2633 (cam_stream_type_t)
2634 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2635 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2636 mMetadataChannel);
2637 if (channel == NULL) {
2638 LOGE("allocation of YUV channel failed");
2639 pthread_mutex_unlock(&mMutex);
2640 return -ENOMEM;
2641 }
2642 newStream->max_buffers = channel->getNumBuffers();
2643 newStream->priv = channel;
2644 break;
2645 }
2646 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2647 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002648 case HAL_PIXEL_FORMAT_RAW10: {
2649 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2650 (HAL_DATASPACE_DEPTH != newStream->data_space))
2651 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002652 mRawChannel = new QCamera3RawChannel(
2653 mCameraHandle->camera_handle, mChannelHandle,
2654 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002655 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002656 this, newStream,
2657 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002658 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002659 if (mRawChannel == NULL) {
2660 LOGE("allocation of raw channel failed");
2661 pthread_mutex_unlock(&mMutex);
2662 return -ENOMEM;
2663 }
2664 newStream->max_buffers = mRawChannel->getNumBuffers();
2665 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2666 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002669 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2670 mDepthChannel = new QCamera3DepthChannel(
2671 mCameraHandle->camera_handle, mChannelHandle,
2672 mCameraHandle->ops, NULL, NULL, &padding_info,
2673 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2674 mMetadataChannel);
2675 if (NULL == mDepthChannel) {
2676 LOGE("Allocation of depth channel failed");
2677 pthread_mutex_unlock(&mMutex);
2678 return NO_MEMORY;
2679 }
2680 newStream->priv = mDepthChannel;
2681 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2682 } else {
2683 // Max live snapshot inflight buffer is 1. This is to mitigate
2684 // frame drop issues for video snapshot. The more buffers being
2685 // allocated, the more frame drops there are.
2686 mPictureChannel = new QCamera3PicChannel(
2687 mCameraHandle->camera_handle, mChannelHandle,
2688 mCameraHandle->ops, captureResultCb,
2689 setBufferErrorStatus, &padding_info, this, newStream,
2690 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2691 m_bIs4KVideo, isZsl, mMetadataChannel,
2692 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2693 if (mPictureChannel == NULL) {
2694 LOGE("allocation of channel failed");
2695 pthread_mutex_unlock(&mMutex);
2696 return -ENOMEM;
2697 }
2698 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2699 newStream->max_buffers = mPictureChannel->getNumBuffers();
2700 mPictureChannel->overrideYuvSize(
2701 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2702 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002703 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002704 break;
2705
2706 default:
2707 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002708 pthread_mutex_unlock(&mMutex);
2709 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002710 }
2711 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2712 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2713 } else {
2714 LOGE("Error, Unknown stream type");
2715 pthread_mutex_unlock(&mMutex);
2716 return -EINVAL;
2717 }
2718
2719 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002720 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002721 // Here we only care whether it's EIS3 or not
2722 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2723 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2724 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2725 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002726 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002727 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002728 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002729 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2730 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2731 }
2732 }
2733
2734 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2735 it != mStreamInfo.end(); it++) {
2736 if ((*it)->stream == newStream) {
2737 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2738 break;
2739 }
2740 }
2741 } else {
2742 // Channel already exists for this stream
2743 // Do nothing for now
2744 }
2745 padding_info = gCamCapability[mCameraId]->padding_info;
2746
Emilian Peev7650c122017-01-19 08:24:33 -08002747 /* Do not add entries for the input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002748 * since there is no real stream associated with them
2749 */
Emilian Peev7650c122017-01-19 08:24:33 -08002750 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002751 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2752 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002753 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002755 }
2756
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002757 // Let buffer dispatcher know the configured streams.
2758 mOutputBufferDispatcher.configureStreams(streamList);
2759
Binhao Lincdb362a2017-04-20 13:31:54 -07002760 // By default, preview stream TNR is disabled.
2761 // Enable TNR to the preview stream if all conditions below are satisfied:
2762 // 1. resolution <= 1080p.
2763 // 2. preview resolution == video resolution.
2764 // 3. video stream TNR is enabled.
2765 // 4. EIS2.0
2766 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2767 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2768 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2769 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2770 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2771 video_stream->width == preview_stream->width &&
2772 video_stream->height == preview_stream->height) {
2773 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2774 CAM_QCOM_FEATURE_CPP_TNR;
2775 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2776 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2777 ~CAM_QCOM_FEATURE_CDS;
2778 }
2779 }
2780
Thierry Strudel2896d122017-02-23 19:18:03 -08002781 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2782 onlyRaw = false;
2783 }
2784
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002785 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002786 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002787 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002788 cam_analysis_info_t analysisInfo;
2789 int32_t ret = NO_ERROR;
2790 ret = mCommon.getAnalysisInfo(
2791 FALSE,
2792 analysisFeatureMask,
2793 &analysisInfo);
2794 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002795 cam_color_filter_arrangement_t analysis_color_arrangement =
2796 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2797 CAM_FILTER_ARRANGEMENT_Y :
2798 gCamCapability[mCameraId]->color_arrangement);
2799 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2800 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002801 cam_dimension_t analysisDim;
2802 analysisDim = mCommon.getMatchingDimension(previewSize,
2803 analysisInfo.analysis_recommended_res);
2804
2805 mAnalysisChannel = new QCamera3SupportChannel(
2806 mCameraHandle->camera_handle,
2807 mChannelHandle,
2808 mCameraHandle->ops,
2809 &analysisInfo.analysis_padding_info,
2810 analysisFeatureMask,
2811 CAM_STREAM_TYPE_ANALYSIS,
2812 &analysisDim,
2813 (analysisInfo.analysis_format
2814 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2815 : CAM_FORMAT_YUV_420_NV21),
2816 analysisInfo.hw_analysis_supported,
2817 gCamCapability[mCameraId]->color_arrangement,
2818 this,
2819 0); // force buffer count to 0
2820 } else {
2821 LOGW("getAnalysisInfo failed, ret = %d", ret);
2822 }
2823 if (!mAnalysisChannel) {
2824 LOGW("Analysis channel cannot be created");
2825 }
2826 }
2827
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 //RAW DUMP channel
2829 if (mEnableRawDump && isRawStreamRequested == false){
2830 cam_dimension_t rawDumpSize;
2831 rawDumpSize = getMaxRawSize(mCameraId);
2832 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2833 setPAAFSupport(rawDumpFeatureMask,
2834 CAM_STREAM_TYPE_RAW,
2835 gCamCapability[mCameraId]->color_arrangement);
2836 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2837 mChannelHandle,
2838 mCameraHandle->ops,
2839 rawDumpSize,
2840 &padding_info,
2841 this, rawDumpFeatureMask);
2842 if (!mRawDumpChannel) {
2843 LOGE("Raw Dump channel cannot be created");
2844 pthread_mutex_unlock(&mMutex);
2845 return -ENOMEM;
2846 }
2847 }
2848
Thierry Strudel3d639192016-09-09 11:52:26 -07002849 if (mAnalysisChannel) {
2850 cam_analysis_info_t analysisInfo;
2851 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2852 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2853 CAM_STREAM_TYPE_ANALYSIS;
2854 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2855 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002856 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002857 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2858 &analysisInfo);
2859 if (rc != NO_ERROR) {
2860 LOGE("getAnalysisInfo failed, ret = %d", rc);
2861 pthread_mutex_unlock(&mMutex);
2862 return rc;
2863 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002864 cam_color_filter_arrangement_t analysis_color_arrangement =
2865 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2866 CAM_FILTER_ARRANGEMENT_Y :
2867 gCamCapability[mCameraId]->color_arrangement);
2868 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2869 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2870 analysis_color_arrangement);
2871
Thierry Strudel3d639192016-09-09 11:52:26 -07002872 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002873 mCommon.getMatchingDimension(previewSize,
2874 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002875 mStreamConfigInfo.num_streams++;
2876 }
2877
Thierry Strudel2896d122017-02-23 19:18:03 -08002878 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002879 cam_analysis_info_t supportInfo;
2880 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2881 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2882 setPAAFSupport(callbackFeatureMask,
2883 CAM_STREAM_TYPE_CALLBACK,
2884 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002885 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002886 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002887 if (ret != NO_ERROR) {
2888 /* Ignore the error for Mono camera
2889 * because the PAAF bit mask is only set
2890 * for CAM_STREAM_TYPE_ANALYSIS stream type
2891 */
2892 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2893 LOGW("getAnalysisInfo failed, ret = %d", ret);
2894 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002895 }
2896 mSupportChannel = new QCamera3SupportChannel(
2897 mCameraHandle->camera_handle,
2898 mChannelHandle,
2899 mCameraHandle->ops,
2900 &gCamCapability[mCameraId]->padding_info,
2901 callbackFeatureMask,
2902 CAM_STREAM_TYPE_CALLBACK,
2903 &QCamera3SupportChannel::kDim,
2904 CAM_FORMAT_YUV_420_NV21,
2905 supportInfo.hw_analysis_supported,
2906 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002907 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002908 if (!mSupportChannel) {
2909 LOGE("dummy channel cannot be created");
2910 pthread_mutex_unlock(&mMutex);
2911 return -ENOMEM;
2912 }
2913 }
2914
2915 if (mSupportChannel) {
2916 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2917 QCamera3SupportChannel::kDim;
2918 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2919 CAM_STREAM_TYPE_CALLBACK;
2920 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2921 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 gCamCapability[mCameraId]->color_arrangement);
2925 mStreamConfigInfo.num_streams++;
2926 }
2927
2928 if (mRawDumpChannel) {
2929 cam_dimension_t rawSize;
2930 rawSize = getMaxRawSize(mCameraId);
2931 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2932 rawSize;
2933 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2934 CAM_STREAM_TYPE_RAW;
2935 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2936 CAM_QCOM_FEATURE_NONE;
2937 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2938 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2939 gCamCapability[mCameraId]->color_arrangement);
2940 mStreamConfigInfo.num_streams++;
2941 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002942
2943 if (mHdrPlusRawSrcChannel) {
2944 cam_dimension_t rawSize;
2945 rawSize = getMaxRawSize(mCameraId);
2946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2947 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2948 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2949 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2950 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2951 gCamCapability[mCameraId]->color_arrangement);
2952 mStreamConfigInfo.num_streams++;
2953 }
2954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 /* In HFR mode, if no video stream is added, create a dummy channel so that
2956 * the ISP can run in batch mode even for the preview-only case. This channel is
2957 * never 'start'ed (no stream-on), it is only 'initialized' */
2958 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2959 !m_bIsVideo) {
2960 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2961 setPAAFSupport(dummyFeatureMask,
2962 CAM_STREAM_TYPE_VIDEO,
2963 gCamCapability[mCameraId]->color_arrangement);
2964 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2965 mChannelHandle,
2966 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002967 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002968 this,
2969 &mDummyBatchStream,
2970 CAM_STREAM_TYPE_VIDEO,
2971 dummyFeatureMask,
2972 mMetadataChannel);
2973 if (NULL == mDummyBatchChannel) {
2974 LOGE("creation of mDummyBatchChannel failed."
2975 "Preview will use non-hfr sensor mode ");
2976 }
2977 }
2978 if (mDummyBatchChannel) {
2979 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2980 mDummyBatchStream.width;
2981 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2982 mDummyBatchStream.height;
2983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2984 CAM_STREAM_TYPE_VIDEO;
2985 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2986 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2987 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2988 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2989 gCamCapability[mCameraId]->color_arrangement);
2990 mStreamConfigInfo.num_streams++;
2991 }
2992
2993 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2994 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002995 m_bIs4KVideo ? 0 :
2996 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002997
2998 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2999 for (pendingRequestIterator i = mPendingRequestsList.begin();
3000 i != mPendingRequestsList.end();) {
3001 i = erasePendingRequest(i);
3002 }
3003 mPendingFrameDropList.clear();
3004 // Initialize/Reset the pending buffers list
3005 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3006 req.mPendingBufferList.clear();
3007 }
3008 mPendingBuffersMap.mPendingBuffersInRequest.clear();
3009
Thierry Strudel3d639192016-09-09 11:52:26 -07003010 mCurJpegMeta.clear();
3011 //Get min frame duration for this stream configuration
3012 deriveMinFrameDuration();
3013
Chien-Yu Chenee335912017-02-09 17:53:20 -08003014 mFirstPreviewIntentSeen = false;
3015
3016 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003017 {
3018 Mutex::Autolock l(gHdrPlusClientLock);
3019 disableHdrPlusModeLocked();
3020 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003021
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 // Update state
3023 mState = CONFIGURED;
3024
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003025 mFirstMetadataCallback = true;
3026
Thierry Strudel3d639192016-09-09 11:52:26 -07003027 pthread_mutex_unlock(&mMutex);
3028
3029 return rc;
3030}
3031
3032/*===========================================================================
3033 * FUNCTION : validateCaptureRequest
3034 *
3035 * DESCRIPTION: validate a capture request from camera service
3036 *
3037 * PARAMETERS :
3038 * @request : request from framework to process
3039 *
3040 * RETURN :
3041 *
3042 *==========================================================================*/
3043int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003044 camera3_capture_request_t *request,
3045 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003046{
3047 ssize_t idx = 0;
3048 const camera3_stream_buffer_t *b;
3049 CameraMetadata meta;
3050
3051 /* Sanity check the request */
3052 if (request == NULL) {
3053 LOGE("NULL capture request");
3054 return BAD_VALUE;
3055 }
3056
3057 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3058 /*settings cannot be null for the first request*/
3059 return BAD_VALUE;
3060 }
3061
3062 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003063 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3064 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003065 LOGE("Request %d: No output buffers provided!",
3066 frameNumber);
3067 return BAD_VALUE;
3068 }
3069 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3070 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3071 request->num_output_buffers, MAX_NUM_STREAMS);
3072 return BAD_VALUE;
3073 }
3074 if (request->input_buffer != NULL) {
3075 b = request->input_buffer;
3076 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3077 LOGE("Request %d: Buffer %ld: Status not OK!",
3078 frameNumber, (long)idx);
3079 return BAD_VALUE;
3080 }
3081 if (b->release_fence != -1) {
3082 LOGE("Request %d: Buffer %ld: Has a release fence!",
3083 frameNumber, (long)idx);
3084 return BAD_VALUE;
3085 }
3086 if (b->buffer == NULL) {
3087 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3088 frameNumber, (long)idx);
3089 return BAD_VALUE;
3090 }
3091 }
3092
3093 // Validate all buffers
3094 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003095 if (b == NULL) {
3096 return BAD_VALUE;
3097 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003098 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003099 QCamera3ProcessingChannel *channel =
3100 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3101 if (channel == NULL) {
3102 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3103 frameNumber, (long)idx);
3104 return BAD_VALUE;
3105 }
3106 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3107 LOGE("Request %d: Buffer %ld: Status not OK!",
3108 frameNumber, (long)idx);
3109 return BAD_VALUE;
3110 }
3111 if (b->release_fence != -1) {
3112 LOGE("Request %d: Buffer %ld: Has a release fence!",
3113 frameNumber, (long)idx);
3114 return BAD_VALUE;
3115 }
3116 if (b->buffer == NULL) {
3117 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3118 frameNumber, (long)idx);
3119 return BAD_VALUE;
3120 }
3121 if (*(b->buffer) == NULL) {
3122 LOGE("Request %d: Buffer %ld: NULL private handle!",
3123 frameNumber, (long)idx);
3124 return BAD_VALUE;
3125 }
3126 idx++;
3127 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003129 return NO_ERROR;
3130}
3131
3132/*===========================================================================
3133 * FUNCTION : deriveMinFrameDuration
3134 *
3135 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3136 * on currently configured streams.
3137 *
3138 * PARAMETERS : NONE
3139 *
3140 * RETURN : NONE
3141 *
3142 *==========================================================================*/
3143void QCamera3HardwareInterface::deriveMinFrameDuration()
3144{
3145 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3146
3147 maxJpegDim = 0;
3148 maxProcessedDim = 0;
3149 maxRawDim = 0;
3150
3151 // Figure out maximum jpeg, processed, and raw dimensions
3152 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3153 it != mStreamInfo.end(); it++) {
3154
3155 // Input stream doesn't have valid stream_type
3156 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3157 continue;
3158
3159 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3160 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3161 if (dimension > maxJpegDim)
3162 maxJpegDim = dimension;
3163 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3164 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3165 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3166 if (dimension > maxRawDim)
3167 maxRawDim = dimension;
3168 } else {
3169 if (dimension > maxProcessedDim)
3170 maxProcessedDim = dimension;
3171 }
3172 }
3173
3174 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3175 MAX_SIZES_CNT);
3176
3177 //Assume all jpeg dimensions are in processed dimensions.
3178 if (maxJpegDim > maxProcessedDim)
3179 maxProcessedDim = maxJpegDim;
3180 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3181 if (maxProcessedDim > maxRawDim) {
3182 maxRawDim = INT32_MAX;
3183
3184 for (size_t i = 0; i < count; i++) {
3185 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3186 gCamCapability[mCameraId]->raw_dim[i].height;
3187 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3188 maxRawDim = dimension;
3189 }
3190 }
3191
3192 //Find minimum durations for processed, jpeg, and raw
3193 for (size_t i = 0; i < count; i++) {
3194 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3195 gCamCapability[mCameraId]->raw_dim[i].height) {
3196 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3197 break;
3198 }
3199 }
3200 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3201 for (size_t i = 0; i < count; i++) {
3202 if (maxProcessedDim ==
3203 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3204 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3205 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3206 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3207 break;
3208 }
3209 }
3210}
3211
3212/*===========================================================================
3213 * FUNCTION : getMinFrameDuration
3214 *
3215 * DESCRIPTION: get minimum frame duration based on the minimum frame durations
3216 * of the currently configured streams and the current request configuration.
3217 *
3218 * PARAMETERS : @request: request sent by the framework
3219 *
3220 * RETURN : min frame duration for a particular request
3221 *
3222 *==========================================================================*/
3223int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3224{
3225 bool hasJpegStream = false;
3226 bool hasRawStream = false;
3227 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3228 const camera3_stream_t *stream = request->output_buffers[i].stream;
3229 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3230 hasJpegStream = true;
3231 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3232 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3233 stream->format == HAL_PIXEL_FORMAT_RAW16)
3234 hasRawStream = true;
3235 }
3236
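    // The request's floor duration is set by the slowest class of stream in the
    // current configuration: the processed and raw minimums always apply, and the
    // jpeg minimum is factored in only when this request carries a BLOB output.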
3237 if (!hasJpegStream)
3238 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3239 else
3240 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3241}
3242
3243/*===========================================================================
3244 * FUNCTION : handleBuffersDuringFlushLock
3245 *
3246 * DESCRIPTION: Account for buffers returned from back-end during flush
3247 * This function is executed while mMutex is held by the caller.
3248 *
3249 * PARAMETERS :
3250 * @buffer: image buffer for the callback
3251 *
3252 * RETURN :
3253 *==========================================================================*/
3254void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3255{
3256 bool buffer_found = false;
3257 for (List<PendingBuffersInRequest>::iterator req =
3258 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3259 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3260 for (List<PendingBufferInfo>::iterator i =
3261 req->mPendingBufferList.begin();
3262 i != req->mPendingBufferList.end(); i++) {
3263 if (i->buffer == buffer->buffer) {
3264 mPendingBuffersMap.numPendingBufsAtFlush--;
3265 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3266 buffer->buffer, req->frame_number,
3267 mPendingBuffersMap.numPendingBufsAtFlush);
3268 buffer_found = true;
3269 break;
3270 }
3271 }
3272 if (buffer_found) {
3273 break;
3274 }
3275 }
3276 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3277 //signal the flush()
3278 LOGD("All buffers returned to HAL. Continue flush");
3279 pthread_cond_signal(&mBuffersCond);
3280 }
3281}
3282
Thierry Strudel3d639192016-09-09 11:52:26 -07003283/*===========================================================================
3284 * FUNCTION : handleBatchMetadata
3285 *
3286 * DESCRIPTION: Handles metadata buffer callback in batch mode
3287 *
3288 * PARAMETERS : @metadata_buf: metadata buffer
3289 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3290 * the meta buf in this method
3291 *
3292 * RETURN :
3293 *
3294 *==========================================================================*/
3295void QCamera3HardwareInterface::handleBatchMetadata(
3296 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3297{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003298 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003299
3300 if (NULL == metadata_buf) {
3301 LOGE("metadata_buf is NULL");
3302 return;
3303 }
3304 /* In batch mode, the metadata will contain the frame number and timestamp of
3305 * the last frame in the batch. Eg: a batch containing buffers from request
3306 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3307 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3308 * multiple process_capture_results */
3309 metadata_buffer_t *metadata =
3310 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3311 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3312 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3313 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3314 uint32_t frame_number = 0, urgent_frame_number = 0;
3315 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3316 bool invalid_metadata = false;
3317 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3318 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003319 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003320
3321 int32_t *p_frame_number_valid =
3322 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3323 uint32_t *p_frame_number =
3324 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3325 int64_t *p_capture_time =
3326 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3327 int32_t *p_urgent_frame_number_valid =
3328 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3329 uint32_t *p_urgent_frame_number =
3330 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3331
3332 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3333 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3334 (NULL == p_urgent_frame_number)) {
3335 LOGE("Invalid metadata");
3336 invalid_metadata = true;
3337 } else {
3338 frame_number_valid = *p_frame_number_valid;
3339 last_frame_number = *p_frame_number;
3340 last_frame_capture_time = *p_capture_time;
3341 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3342 last_urgent_frame_number = *p_urgent_frame_number;
3343 }
3344
3345 /* In batch mode, when no video buffers are requested, set_parms are sent
3346 * for every capture_request. The difference between consecutive urgent
3347 * frame numbers and frame numbers should be used to interpolate the
3348 * corresponding frame numbers and time stamps */
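    /* Example: if this batch reports last_frame_number = 8 and the batch began at
     * first_frame_number = 5, then frameNumDiff = 8 + 1 - 5 = 4, and the loop below
     * runs MAX(urgentFrameNumDiff, frameNumDiff) times, inferring frame numbers 5..8
     * and spacing the inferred timestamps by NSEC_PER_SEC / mHFRVideoFps */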
3349 pthread_mutex_lock(&mMutex);
3350 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003351 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3352 if(idx < 0) {
3353 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3354 last_urgent_frame_number);
3355 mState = ERROR;
3356 pthread_mutex_unlock(&mMutex);
3357 return;
3358 }
3359 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003360 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3361 first_urgent_frame_number;
3362
3363 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3364 urgent_frame_number_valid,
3365 first_urgent_frame_number, last_urgent_frame_number);
3366 }
3367
3368 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003369 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3370 if(idx < 0) {
3371 LOGE("Invalid frame number received: %d. Irrecoverable error",
3372 last_frame_number);
3373 mState = ERROR;
3374 pthread_mutex_unlock(&mMutex);
3375 return;
3376 }
3377 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 frameNumDiff = last_frame_number + 1 -
3379 first_frame_number;
3380 mPendingBatchMap.removeItem(last_frame_number);
3381
3382 LOGD("frm: valid: %d frm_num: %d - %d",
3383 frame_number_valid,
3384 first_frame_number, last_frame_number);
3385
3386 }
3387 pthread_mutex_unlock(&mMutex);
3388
3389 if (urgent_frame_number_valid || frame_number_valid) {
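        // Replay over the larger of the two spans so that the loop below emits both
        // the urgent (partial) and the final metadata for every request in the batch.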
3390 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3391 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3392 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3393 urgentFrameNumDiff, last_urgent_frame_number);
3394 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3395 LOGE("frameNumDiff: %d frameNum: %d",
3396 frameNumDiff, last_frame_number);
3397 }
3398
3399 for (size_t i = 0; i < loopCount; i++) {
3400 /* handleMetadataWithLock is called even for invalid_metadata for
3401 * pipeline depth calculation */
3402 if (!invalid_metadata) {
3403 /* Infer frame number. Batch metadata contains frame number of the
3404 * last frame */
3405 if (urgent_frame_number_valid) {
3406 if (i < urgentFrameNumDiff) {
3407 urgent_frame_number =
3408 first_urgent_frame_number + i;
3409 LOGD("inferred urgent frame_number: %d",
3410 urgent_frame_number);
3411 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3412 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3413 } else {
3414 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3415 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3416 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3417 }
3418 }
3419
3420 /* Infer frame number. Batch metadata contains frame number of the
3421 * last frame */
3422 if (frame_number_valid) {
3423 if (i < frameNumDiff) {
3424 frame_number = first_frame_number + i;
3425 LOGD("inferred frame_number: %d", frame_number);
3426 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3427 CAM_INTF_META_FRAME_NUMBER, frame_number);
3428 } else {
3429 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3430 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3431 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3432 }
3433 }
3434
3435 if (last_frame_capture_time) {
3436 //Infer timestamp
3437 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003438 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003439 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003440 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003441 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3442 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3443 LOGD("batch capture_time: %lld, capture_time: %lld",
3444 last_frame_capture_time, capture_time);
3445 }
3446 }
3447 pthread_mutex_lock(&mMutex);
3448 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003449 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003450 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3451 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003452 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003453 pthread_mutex_unlock(&mMutex);
3454 }
3455
3456 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003457 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003458 mMetadataChannel->bufDone(metadata_buf);
3459 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003460 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003461 }
3462}
3463
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003464void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3465 camera3_error_msg_code_t errorCode)
3466{
3467 camera3_notify_msg_t notify_msg;
3468 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3469 notify_msg.type = CAMERA3_MSG_ERROR;
3470 notify_msg.message.error.error_code = errorCode;
3471 notify_msg.message.error.error_stream = NULL;
3472 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003473 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003474
3475 return;
3476}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003477
3478/*===========================================================================
3479 * FUNCTION : sendPartialMetadataWithLock
3480 *
3481 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3482 *
3483 * PARAMETERS : @metadata: metadata buffer
3484 * @requestIter: The iterator for the pending capture request for
3485 * which the partial result is being sent
3486 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3487 * last urgent metadata in a batch. Always true for non-batch mode
3488 *
3489 * RETURN :
3490 *
3491 *==========================================================================*/
3492
3493void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3494 metadata_buffer_t *metadata,
3495 const pendingRequestIterator requestIter,
3496 bool lastUrgentMetadataInBatch)
3497{
3498 camera3_capture_result_t result;
3499 memset(&result, 0, sizeof(camera3_capture_result_t));
3500
3501 requestIter->partial_result_cnt++;
3502
3503 // Extract 3A metadata
3504 result.result = translateCbUrgentMetadataToResultMetadata(
3505 metadata, lastUrgentMetadataInBatch);
3506 // Populate metadata result
3507 result.frame_number = requestIter->frame_number;
3508 result.num_output_buffers = 0;
3509 result.output_buffers = NULL;
3510 result.partial_result = requestIter->partial_result_cnt;
3511
3512 {
3513 Mutex::Autolock l(gHdrPlusClientLock);
3514 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3515 // Notify HDR+ client about the partial metadata.
3516 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3517 result.partial_result == PARTIAL_RESULT_COUNT);
3518 }
3519 }
3520
3521 orchestrateResult(&result);
3522 LOGD("urgent frame_number = %u", result.frame_number);
3523 free_camera_metadata((camera_metadata_t *)result.result);
3524}
3525
Thierry Strudel3d639192016-09-09 11:52:26 -07003526/*===========================================================================
3527 * FUNCTION : handleMetadataWithLock
3528 *
3529 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3530 *
3531 * PARAMETERS : @metadata_buf: metadata buffer
3532 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3533 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003534 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3535 * last urgent metadata in a batch. Always true for non-batch mode
3536 * @lastMetadataInBatch: Boolean to indicate whether this is the
3537 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003538 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3539 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003540 *
3541 * RETURN :
3542 *
3543 *==========================================================================*/
3544void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003545 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003546 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3547 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003548{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003549 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003550 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3551 //during flush do not send metadata from this thread
3552 LOGD("not sending metadata during flush or when mState is error");
3553 if (free_and_bufdone_meta_buf) {
3554 mMetadataChannel->bufDone(metadata_buf);
3555 free(metadata_buf);
3556 }
3557 return;
3558 }
3559
3560 //not in flush
3561 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3562 int32_t frame_number_valid, urgent_frame_number_valid;
3563 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003564 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003565 nsecs_t currentSysTime;
3566
3567 int32_t *p_frame_number_valid =
3568 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3569 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3570 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003571 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003572 int32_t *p_urgent_frame_number_valid =
3573 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3574 uint32_t *p_urgent_frame_number =
3575 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3576 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3577 metadata) {
3578 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3579 *p_frame_number_valid, *p_frame_number);
3580 }
3581
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 camera_metadata_t *resultMetadata = nullptr;
3583
Thierry Strudel3d639192016-09-09 11:52:26 -07003584 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3585 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3586 LOGE("Invalid metadata");
3587 if (free_and_bufdone_meta_buf) {
3588 mMetadataChannel->bufDone(metadata_buf);
3589 free(metadata_buf);
3590 }
3591 goto done_metadata;
3592 }
3593 frame_number_valid = *p_frame_number_valid;
3594 frame_number = *p_frame_number;
3595 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003596 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003597 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3598 urgent_frame_number = *p_urgent_frame_number;
3599 currentSysTime = systemTime(CLOCK_MONOTONIC);
3600
Jason Lee603176d2017-05-31 11:43:27 -07003601 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
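        // Estimate the CLOCK_BOOTTIME - CLOCK_MONOTONIC offset by sampling both
        // clocks back to back, keeping the attempt with the smallest sampling gap,
        // then subtract that offset from the sensor timestamp to shift it onto the
        // monotonic time base.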
3602 const int tries = 3;
3603 nsecs_t bestGap, measured;
3604 for (int i = 0; i < tries; ++i) {
3605 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3606 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3607 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3608 const nsecs_t gap = tmono2 - tmono;
3609 if (i == 0 || gap < bestGap) {
3610 bestGap = gap;
3611 measured = tbase - ((tmono + tmono2) >> 1);
3612 }
3613 }
3614 capture_time -= measured;
3615 }
3616
Thierry Strudel3d639192016-09-09 11:52:26 -07003617 // Detect if buffers from any requests are overdue
3618 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003619 int64_t timeout;
3620 {
3621 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3622 // If there is a pending HDR+ request, the following requests may be blocked until the
3623 // HDR+ request is done. So allow a longer timeout.
3624 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3625 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3626 }
3627
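        // Requests whose buffers have been outstanding longer than the timeout are
        // cancelled on their channels via timeoutFrame() so the pipeline does not
        // stall waiting for them.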
3628 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003629 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003630 assert(missed.stream->priv);
3631 if (missed.stream->priv) {
3632 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3633 assert(ch->mStreams[0]);
3634 if (ch->mStreams[0]) {
3635 LOGE("Cancel missing frame = %d, buffer = %p,"
3636 "stream type = %d, stream format = %d",
3637 req.frame_number, missed.buffer,
3638 ch->mStreams[0]->getMyType(), missed.stream->format);
3639 ch->timeoutFrame(req.frame_number);
3640 }
3641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003642 }
3643 }
3644 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003645 //For the very first metadata callback, regardless whether it contains valid
3646 //frame number, send the partial metadata for the jumpstarting requests.
3647 //Note that this has to be done even if the metadata doesn't contain valid
3648 //urgent frame number, because in the case only 1 request is ever submitted
3649 //to HAL, there won't be subsequent valid urgent frame number.
3650 if (mFirstMetadataCallback) {
3651 for (pendingRequestIterator i =
3652 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3653 if (i->bUseFirstPartial) {
3654 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3655 }
3656 }
3657 mFirstMetadataCallback = false;
3658 }
3659
Thierry Strudel3d639192016-09-09 11:52:26 -07003660 //Partial result on process_capture_result for timestamp
3661 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003662 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003663
3664 //Received an urgent Frame Number, handle it
3665 //using partial results
3666 for (pendingRequestIterator i =
3667 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3668 LOGD("Iterator Frame = %d urgent frame = %d",
3669 i->frame_number, urgent_frame_number);
3670
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003671 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003672 (i->partial_result_cnt == 0)) {
3673 LOGE("Error: HAL missed urgent metadata for frame number %d",
3674 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003675 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003676 }
3677
3678 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003679 i->partial_result_cnt == 0) {
3680 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003681 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3682 // Instant AEC settled for this frame.
3683 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3684 mInstantAECSettledFrameNumber = urgent_frame_number;
3685 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003686 break;
3687 }
3688 }
3689 }
3690
3691 if (!frame_number_valid) {
3692 LOGD("Not a valid normal frame number, used as SOF only");
3693 if (free_and_bufdone_meta_buf) {
3694 mMetadataChannel->bufDone(metadata_buf);
3695 free(metadata_buf);
3696 }
3697 goto done_metadata;
3698 }
3699 LOGH("valid frame_number = %u, capture_time = %lld",
3700 frame_number, capture_time);
3701
Emilian Peev7650c122017-01-19 08:24:33 -08003702 if (metadata->is_depth_data_valid) {
3703 handleDepthDataLocked(metadata->depth_data, frame_number);
3704 }
3705
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003706 // Check whether any stream buffer corresponding to this is dropped or not
3707 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3708 // OR, if instant AEC is enabled, frames need to be dropped until AEC is settled.
3709 for (auto & pendingRequest : mPendingRequestsList) {
3710 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3711 mInstantAECSettledFrameNumber)) {
3712 camera3_notify_msg_t notify_msg = {};
3713 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003714 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003715 QCamera3ProcessingChannel *channel =
3716 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003717 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003718 if (p_cam_frame_drop) {
3719 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003720 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003721 // Got the stream ID for drop frame.
3722 dropFrame = true;
3723 break;
3724 }
3725 }
3726 } else {
3727 // This is instant AEC case.
3728 // For instant AEC, drop the stream until AEC is settled.
3729 dropFrame = true;
3730 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003732 if (dropFrame) {
3733 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3734 if (p_cam_frame_drop) {
3735 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003736 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003737 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003738 } else {
3739 // For instant AEC, inform frame drop and frame number
3740 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3741 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003742 pendingRequest.frame_number, streamID,
3743 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003744 }
3745 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003746 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003748 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003749 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003750 if (p_cam_frame_drop) {
3751 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003752 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003753 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003754 } else {
3755 // For instant AEC, inform frame drop and frame number
3756 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3757 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003758 pendingRequest.frame_number, streamID,
3759 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003760 }
3761 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003762 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003763 PendingFrameDrop.stream_ID = streamID;
3764 // Add the Frame drop info to mPendingFrameDropList
3765 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003766 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003767 }
3768 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003769 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 for (auto & pendingRequest : mPendingRequestsList) {
3772 // Find the pending request with the frame number.
3773 if (pendingRequest.frame_number == frame_number) {
3774 // Update the sensor timestamp.
3775 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003776
Thierry Strudel3d639192016-09-09 11:52:26 -07003777
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003778 /* Set the timestamp in the display metadata so that clients aware of
3779 private_handle, such as VT, can use this unmodified timestamp.
3780 The camera framework is unaware of this timestamp and cannot change it */
Jason Lee603176d2017-05-31 11:43:27 -07003781 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003782
Thierry Strudel3d639192016-09-09 11:52:26 -07003783 // Find channel requiring metadata, meaning internal offline postprocess
3784 // is needed.
3785 //TODO: for now, we don't support two streams requiring metadata at the same time.
3786 // (because we are not making copies, and the metadata buffer is not reference counted).
3787 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3789 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003790 if (iter->need_metadata) {
3791 internalPproc = true;
3792 QCamera3ProcessingChannel *channel =
3793 (QCamera3ProcessingChannel *)iter->stream->priv;
3794 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003795 if(p_is_metabuf_queued != NULL) {
3796 *p_is_metabuf_queued = true;
3797 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003798 break;
3799 }
3800 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 for (auto itr = pendingRequest.internalRequestList.begin();
3802 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003803 if (itr->need_metadata) {
3804 internalPproc = true;
3805 QCamera3ProcessingChannel *channel =
3806 (QCamera3ProcessingChannel *)itr->stream->priv;
3807 channel->queueReprocMetadata(metadata_buf);
3808 break;
3809 }
3810 }
3811
Thierry Strudel54dc9782017-02-15 12:12:10 -08003812 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003813
3814 bool *enableZsl = nullptr;
3815 if (gExposeEnableZslKey) {
3816 enableZsl = &pendingRequest.enableZsl;
3817 }
3818
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 resultMetadata = translateFromHalMetadata(metadata,
3820 pendingRequest.timestamp, pendingRequest.request_id,
3821 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3822 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003823 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003824 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003825 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003826 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003828 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003829
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003831
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003832 if (pendingRequest.blob_request) {
3833 //Dump tuning metadata if enabled and available
3834 char prop[PROPERTY_VALUE_MAX];
3835 memset(prop, 0, sizeof(prop));
3836 property_get("persist.camera.dumpmetadata", prop, "0");
3837 int32_t enabled = atoi(prop);
3838 if (enabled && metadata->is_tuning_params_valid) {
3839 dumpMetadataToFile(metadata->tuning_params,
3840 mMetaFrameCount,
3841 enabled,
3842 "Snapshot",
3843 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003844 }
3845 }
3846
3847 if (!internalPproc) {
3848 LOGD("couldn't find need_metadata for this metadata");
3849 // Return metadata buffer
3850 if (free_and_bufdone_meta_buf) {
3851 mMetadataChannel->bufDone(metadata_buf);
3852 free(metadata_buf);
3853 }
3854 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003855
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003856 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003857 }
3858 }
3859
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003860 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3861
3862 // Try to send out capture result metadata.
3863 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003864 return;
3865
Thierry Strudel3d639192016-09-09 11:52:26 -07003866done_metadata:
3867 for (pendingRequestIterator i = mPendingRequestsList.begin();
3868 i != mPendingRequestsList.end() ;i++) {
3869 i->pipeline_depth++;
3870 }
3871 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3872 unblockRequestIfNecessary();
3873}
3874
3875/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003876 * FUNCTION : handleDepthDataLocked
3877 *
3878 * DESCRIPTION: Handles incoming depth data
3879 *
3880 * PARAMETERS : @depthData : Depth data
3881 * @frameNumber: Frame number of the incoming depth data
3882 *
3883 * RETURN :
3884 *
3885 *==========================================================================*/
3886void QCamera3HardwareInterface::handleDepthDataLocked(
3887 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3888 uint32_t currentFrameNumber;
3889 buffer_handle_t *depthBuffer;
3890
3891 if (nullptr == mDepthChannel) {
3892 LOGE("Depth channel not present!");
3893 return;
3894 }
3895
3896 camera3_stream_buffer_t resultBuffer =
3897 {.acquire_fence = -1,
3898 .release_fence = -1,
3899 .status = CAMERA3_BUFFER_STATUS_OK,
3900 .buffer = nullptr,
3901 .stream = mDepthChannel->getStream()};
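    // Drain queued depth buffers in frame order: frames older than the one this
    // depth data belongs to are returned with a buffer error, the matching frame
    // is populated with the depth payload, and newer frames stay queued.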
Emilian Peev7650c122017-01-19 08:24:33 -08003902 do {
3903 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3904 if (nullptr == depthBuffer) {
3905 break;
3906 }
3907
Emilian Peev7650c122017-01-19 08:24:33 -08003908 resultBuffer.buffer = depthBuffer;
3909 if (currentFrameNumber == frameNumber) {
3910 int32_t rc = mDepthChannel->populateDepthData(depthData,
3911 frameNumber);
3912 if (NO_ERROR != rc) {
3913 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3914 } else {
3915 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3916 }
3917 } else if (currentFrameNumber > frameNumber) {
3918 break;
3919 } else {
3920 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3921 {{currentFrameNumber, mDepthChannel->getStream(),
3922 CAMERA3_MSG_ERROR_BUFFER}}};
3923 orchestrateNotify(&notify_msg);
3924
3925 LOGE("Depth buffer for frame number: %d is missing "
3926 "returning back!", currentFrameNumber);
3927 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3928 }
3929 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003930 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003931 } while (currentFrameNumber < frameNumber);
3932}
3933
3934/*===========================================================================
3935 * FUNCTION : notifyErrorFoPendingDepthData
3936 *
3937 * DESCRIPTION: Returns a buffer error for every pending depth buffer
3938 *
3939 * PARAMETERS : depthCh - depth channel that needs to get flushed
3940 *
3941 * RETURN :
3942 *
3943 *==========================================================================*/
3944void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3945 QCamera3DepthChannel *depthCh) {
3946 uint32_t currentFrameNumber;
3947 buffer_handle_t *depthBuffer;
3948
3949 if (nullptr == depthCh) {
3950 return;
3951 }
3952
3953 camera3_notify_msg_t notify_msg =
3954 {.type = CAMERA3_MSG_ERROR,
3955 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3956 camera3_stream_buffer_t resultBuffer =
3957 {.acquire_fence = -1,
3958 .release_fence = -1,
3959 .buffer = nullptr,
3960 .stream = depthCh->getStream(),
3961 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003962
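    // Flush every queued depth buffer: raise an ERROR_BUFFER notification for its
    // frame and return the buffer with an error status through the dispatcher.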
3963 while (nullptr !=
3964 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3965 depthCh->unmapBuffer(currentFrameNumber);
3966
3967 notify_msg.message.error.frame_number = currentFrameNumber;
3968 orchestrateNotify(&notify_msg);
3969
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003970 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003971 }
3972}
3973
3974/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003975 * FUNCTION : hdrPlusPerfLock
3976 *
3977 * DESCRIPTION: Acquires a perf lock for HDR+ capture triggered via custom intent
3978 *
3979 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3980 *
3981 * RETURN : None
3982 *
3983 *==========================================================================*/
3984void QCamera3HardwareInterface::hdrPlusPerfLock(
3985 mm_camera_super_buf_t *metadata_buf)
3986{
3987 if (NULL == metadata_buf) {
3988 LOGE("metadata_buf is NULL");
3989 return;
3990 }
3991 metadata_buffer_t *metadata =
3992 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3993 int32_t *p_frame_number_valid =
3994 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3995 uint32_t *p_frame_number =
3996 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3997
3998 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3999 LOGE("%s: Invalid metadata", __func__);
4000 return;
4001 }
4002
4003     //Acquire the snapshot perf lock once the last HDR frame is captured; it is
4004     //released automatically after HDR_PLUS_PERF_TIME_OUT. Both pointers were
4005     //already validated above, so only their contents are checked here.
4006     if (*p_frame_number_valid &&
4007             (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
4008         mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
4009     }
Thierry Strudel3d639192016-09-09 11:52:26 -07004010}
4011
4012/*===========================================================================
4013 * FUNCTION : handleInputBufferWithLock
4014 *
4015 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4016 *
4017 * PARAMETERS : @frame_number: frame number of the input buffer
4018 *
4019 * RETURN :
4020 *
4021 *==========================================================================*/
4022void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4023{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004024 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004025 pendingRequestIterator i = mPendingRequestsList.begin();
4026 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4027 i++;
4028 }
4029 if (i != mPendingRequestsList.end() && i->input_buffer) {
4030 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004031 CameraMetadata settings;
4032 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4033 if(i->settings) {
4034 settings = i->settings;
4035 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4036 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004038 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004039 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004040 } else {
4041 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004042 }
4043
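        // For reprocess requests the shutter timestamp is taken from
        // ANDROID_SENSOR_TIMESTAMP in the input settings when present; otherwise
        // the current time captured above is used as a fallback.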
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004044 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4045 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4046 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004047
4048 camera3_capture_result result;
4049 memset(&result, 0, sizeof(camera3_capture_result));
4050 result.frame_number = frame_number;
4051 result.result = i->settings;
4052 result.input_buffer = i->input_buffer;
4053 result.partial_result = PARTIAL_RESULT_COUNT;
4054
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004055 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004056 LOGD("Input request metadata and input buffer frame_number = %u",
4057 i->frame_number);
4058 i = erasePendingRequest(i);
4059 } else {
4060 LOGE("Could not find input request for frame number %d", frame_number);
4061 }
4062}
4063
4064/*===========================================================================
4065 * FUNCTION : handleBufferWithLock
4066 *
4067 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4068 *
4069 * PARAMETERS : @buffer: image buffer for the callback
4070 * @frame_number: frame number of the image buffer
4071 *
4072 * RETURN :
4073 *
4074 *==========================================================================*/
4075void QCamera3HardwareInterface::handleBufferWithLock(
4076 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4077{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004078 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004079
4080 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4081 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4082 }
4083
Thierry Strudel3d639192016-09-09 11:52:26 -07004084 /* Nothing to be done during error state */
4085 if ((ERROR == mState) || (DEINIT == mState)) {
4086 return;
4087 }
4088 if (mFlushPerf) {
4089 handleBuffersDuringFlushLock(buffer);
4090 return;
4091 }
4092 //not in flush
4093     // Look up the pending request for this frame number. For a reprocess
4094     // request (one carrying an input buffer), try to send out its result
4095     // metadata now; the output buffer itself is handed to the dispatcher below.
4096 pendingRequestIterator i = mPendingRequestsList.begin();
4097 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4098 i++;
4099 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004100
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 // For a reprocessing request, try to send out result metadata.
4104 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004105 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004106 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004107
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004108 // Check if this frame was dropped.
4109 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4110 m != mPendingFrameDropList.end(); m++) {
4111 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4112 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4113 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4114 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4115 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4116 frame_number, streamID);
4117 m = mPendingFrameDropList.erase(m);
4118 break;
4119 }
4120 }
4121
4122 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4123 LOGH("result frame_number = %d, buffer = %p",
4124 frame_number, buffer->buffer);
4125
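    // Hand the buffer to the output dispatcher, which is responsible for
    // returning output buffers to the framework in frame-number order.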
4126 mPendingBuffersMap.removeBuf(buffer->buffer);
4127 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4128
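    // On the first preview buffer, release the startup perf locks and switch to
    // the steady-state encode power hint.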
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004129 if (mPreviewStarted == false) {
4130 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4131 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004132 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4133
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004134 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4135 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4136 mPreviewStarted = true;
4137
4138 // Set power hint for preview
4139 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4140 }
4141 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004142}
4143
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004144void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004145 const camera_metadata_t *resultMetadata)
4146{
4147 // Find the pending request for this result metadata.
4148 auto requestIter = mPendingRequestsList.begin();
4149 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4150 requestIter++;
4151 }
4152
4153 if (requestIter == mPendingRequestsList.end()) {
4154 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4155 return;
4156 }
4157
4158 // Update the result metadata
4159 requestIter->resultMetadata = resultMetadata;
4160
4161 // Check what type of request this is.
4162 bool liveRequest = false;
4163 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004164 // HDR+ request doesn't have partial results.
4165 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004166 } else if (requestIter->input_buffer != nullptr) {
4167 // Reprocessing request result is the same as settings.
4168 requestIter->resultMetadata = requestIter->settings;
4169 // Reprocessing request doesn't have partial results.
4170 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4171 } else {
4172 liveRequest = true;
4173 requestIter->partial_result_cnt++;
4174 mPendingLiveRequest--;
4175
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004176 {
4177 Mutex::Autolock l(gHdrPlusClientLock);
4178 // For a live request, send the metadata to HDR+ client.
4179 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4180 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4181 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4182 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004183 }
4184 }
4185
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004186 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4187 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004188 bool readyToSend = true;
4189
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004190 // Iterate through the pending requests to send out result metadata that are ready. Also if
4191 // this result metadata belongs to a live request, notify errors for previous live requests
4192 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004193 auto iter = mPendingRequestsList.begin();
4194 while (iter != mPendingRequestsList.end()) {
4195 // Check if current pending request is ready. If it's not ready, the following pending
4196 // requests are also not ready.
4197 if (readyToSend && iter->resultMetadata == nullptr) {
4198 readyToSend = false;
4199 }
4200
4201 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4202
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 camera3_capture_result_t result = {};
4204 result.frame_number = iter->frame_number;
4205 result.result = iter->resultMetadata;
4206 result.partial_result = iter->partial_result_cnt;
4207
4208 // If this pending buffer has result metadata, we may be able to send out shutter callback
4209 // and result metadata.
4210 if (iter->resultMetadata != nullptr) {
4211 if (!readyToSend) {
4212 // If any of the previous pending request is not ready, this pending request is
4213 // also not ready to send in order to keep shutter callbacks and result metadata
4214 // in order.
4215 iter++;
4216 continue;
4217 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004218 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4219 // If the result metadata belongs to a live request, notify errors for previous pending
4220 // live requests.
4221 mPendingLiveRequest--;
4222
4223 CameraMetadata dummyMetadata;
4224 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4225 result.result = dummyMetadata.release();
4226
4227 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004228
4229 // partial_result should be PARTIAL_RESULT_CNT in case of
4230 // ERROR_RESULT.
4231 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4232 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004233 } else {
4234 iter++;
4235 continue;
4236 }
4237
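        // Output buffers are delivered separately through mOutputBufferDispatcher,
        // so this capture result carries metadata only.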
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004238 result.output_buffers = nullptr;
4239 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004240 orchestrateResult(&result);
4241
4242 // For reprocessing, result metadata is the same as settings so do not free it here to
4243 // avoid double free.
4244 if (result.result != iter->settings) {
4245 free_camera_metadata((camera_metadata_t *)result.result);
4246 }
4247 iter->resultMetadata = nullptr;
4248 iter = erasePendingRequest(iter);
4249 }
4250
4251 if (liveRequest) {
4252 for (auto &iter : mPendingRequestsList) {
4253 // Increment pipeline depth for the following pending requests.
4254 if (iter.frame_number > frameNumber) {
4255 iter.pipeline_depth++;
4256 }
4257 }
4258 }
4259
4260 unblockRequestIfNecessary();
4261}
4262
Thierry Strudel3d639192016-09-09 11:52:26 -07004263/*===========================================================================
4264 * FUNCTION : unblockRequestIfNecessary
4265 *
4266 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4267 * that mMutex is held when this function is called.
4268 *
4269 * PARAMETERS :
4270 *
4271 * RETURN :
4272 *
4273 *==========================================================================*/
4274void QCamera3HardwareInterface::unblockRequestIfNecessary()
4275{
4276 // Unblock process_capture_request
4277 pthread_cond_signal(&mRequestCond);
4278}
4279
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004280/*===========================================================================
4281 * FUNCTION : isHdrSnapshotRequest
4282 *
4283 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4284 *
4285 * PARAMETERS : camera3 request structure
4286 *
4287 * RETURN : boolean decision variable
4288 *
4289 *==========================================================================*/
4290bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4291{
4292 if (request == NULL) {
4293 LOGE("Invalid request handle");
4294 assert(0);
4295 return false;
4296 }
4297
4298 if (!mForceHdrSnapshot) {
4299 CameraMetadata frame_settings;
4300 frame_settings = request->settings;
4301
4302 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4303 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4304 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4305 return false;
4306 }
4307 } else {
4308 return false;
4309 }
4310
4311 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4312 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4313 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4314 return false;
4315 }
4316 } else {
4317 return false;
4318 }
4319 }
4320
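    // Even with HDR scene mode (or the force property) set, treat the request as
    // an HDR snapshot only if it contains at least one BLOB (JPEG) output buffer.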
4321 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4322 if (request->output_buffers[i].stream->format
4323 == HAL_PIXEL_FORMAT_BLOB) {
4324 return true;
4325 }
4326 }
4327
4328 return false;
4329}
4330/*===========================================================================
4331 * FUNCTION : orchestrateRequest
4332 *
4333 * DESCRIPTION: Orchestrates a capture request from camera service
4334 *
4335 * PARAMETERS :
4336 * @request : request from framework to process
4337 *
4338 * RETURN : Error status codes
4339 *
4340 *==========================================================================*/
4341int32_t QCamera3HardwareInterface::orchestrateRequest(
4342 camera3_capture_request_t *request)
4343{
4344
4345 uint32_t originalFrameNumber = request->frame_number;
4346 uint32_t originalOutputCount = request->num_output_buffers;
4347 const camera_metadata_t *original_settings = request->settings;
4348 List<InternalRequest> internallyRequestedStreams;
4349 List<InternalRequest> emptyInternalList;
4350
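    // HDR snapshots are expanded into a bracketed sequence of internal requests:
    // AE is locked and exposure compensation is stepped through
    // GB_HDR_HALF_STEP_EV, 0 and GB_HDR_2X_STEP_EV, interleaving metering-only
    // captures with full captures on the internally requested BLOB stream. Only
    // the frame number registered via allocStoreInternalFrameNumber maps back to
    // the framework request; results for the other internal frame numbers are
    // dropped by the orchestration layer.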
4351 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4352 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4353 uint32_t internalFrameNumber;
4354 CameraMetadata modified_meta;
4355
4356
4357 /* Add Blob channel to list of internally requested streams */
4358 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4359 if (request->output_buffers[i].stream->format
4360 == HAL_PIXEL_FORMAT_BLOB) {
4361 InternalRequest streamRequested;
4362 streamRequested.meteringOnly = 1;
4363 streamRequested.need_metadata = 0;
4364 streamRequested.stream = request->output_buffers[i].stream;
4365 internallyRequestedStreams.push_back(streamRequested);
4366 }
4367 }
4368 request->num_output_buffers = 0;
4369 auto itr = internallyRequestedStreams.begin();
4370
4371 /* Modify setting to set compensation */
4372 modified_meta = request->settings;
4373 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4374 uint8_t aeLock = 1;
4375 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4376 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4377 camera_metadata_t *modified_settings = modified_meta.release();
4378 request->settings = modified_settings;
4379
4380 /* Capture Settling & -2x frame */
4381 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4382 request->frame_number = internalFrameNumber;
4383 processCaptureRequest(request, internallyRequestedStreams);
4384
4385 request->num_output_buffers = originalOutputCount;
4386 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 processCaptureRequest(request, emptyInternalList);
4389 request->num_output_buffers = 0;
4390
4391 modified_meta = modified_settings;
4392 expCompensation = 0;
4393 aeLock = 1;
4394 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4395 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4396 modified_settings = modified_meta.release();
4397 request->settings = modified_settings;
4398
4399 /* Capture Settling & 0X frame */
4400
4401 itr = internallyRequestedStreams.begin();
4402 if (itr == internallyRequestedStreams.end()) {
4403 LOGE("Error Internally Requested Stream list is empty");
4404 assert(0);
4405 } else {
4406 itr->need_metadata = 0;
4407 itr->meteringOnly = 1;
4408 }
4409
4410 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4411 request->frame_number = internalFrameNumber;
4412 processCaptureRequest(request, internallyRequestedStreams);
4413
4414 itr = internallyRequestedStreams.begin();
4415 if (itr == internallyRequestedStreams.end()) {
4416 ALOGE("Error Internally Requested Stream list is empty");
4417 assert(0);
4418 } else {
4419 itr->need_metadata = 1;
4420 itr->meteringOnly = 0;
4421 }
4422
4423 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4424 request->frame_number = internalFrameNumber;
4425 processCaptureRequest(request, internallyRequestedStreams);
4426
4427 /* Capture 2X frame*/
4428 modified_meta = modified_settings;
4429 expCompensation = GB_HDR_2X_STEP_EV;
4430 aeLock = 1;
4431 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4432 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4433 modified_settings = modified_meta.release();
4434 request->settings = modified_settings;
4435
4436 itr = internallyRequestedStreams.begin();
4437 if (itr == internallyRequestedStreams.end()) {
4438 ALOGE("Error Internally Requested Stream list is empty");
4439 assert(0);
4440 } else {
4441 itr->need_metadata = 0;
4442 itr->meteringOnly = 1;
4443 }
4444 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4445 request->frame_number = internalFrameNumber;
4446 processCaptureRequest(request, internallyRequestedStreams);
4447
4448 itr = internallyRequestedStreams.begin();
4449 if (itr == internallyRequestedStreams.end()) {
4450 ALOGE("Error Internally Requested Stream list is empty");
4451 assert(0);
4452 } else {
4453 itr->need_metadata = 1;
4454 itr->meteringOnly = 0;
4455 }
4456
4457 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4458 request->frame_number = internalFrameNumber;
4459 processCaptureRequest(request, internallyRequestedStreams);
4460
4461
4462 /* Capture 2X on original streaming config*/
4463 internallyRequestedStreams.clear();
4464
4465 /* Restore original settings pointer */
4466 request->settings = original_settings;
4467 } else {
4468 uint32_t internalFrameNumber;
4469 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4470 request->frame_number = internalFrameNumber;
4471 return processCaptureRequest(request, internallyRequestedStreams);
4472 }
4473
4474 return NO_ERROR;
4475}
4476
4477/*===========================================================================
4478 * FUNCTION : orchestrateResult
4479 *
4480 * DESCRIPTION: Orchestrates a capture result to camera service
4481 *
4482 * PARAMETERS :
4483 * @result : capture result to be sent to camera service
4484 *
4485 * RETURN :
4486 *
4487 *==========================================================================*/
4488void QCamera3HardwareInterface::orchestrateResult(
4489 camera3_capture_result_t *result)
4490{
4491 uint32_t frameworkFrameNumber;
4492 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4493 frameworkFrameNumber);
4494 if (rc != NO_ERROR) {
4495 LOGE("Cannot find translated frameworkFrameNumber");
4496 assert(0);
4497 } else {
4498 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004499 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004500 } else {
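            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it
            // to refer to the framework frame number instead of the internal one.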
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004501 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004502 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4503 camera_metadata_entry_t entry;
4504 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4505 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004506 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004507 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4508 if (ret != OK)
4509 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004510 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004511 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004512 result->frame_number = frameworkFrameNumber;
4513 mCallbackOps->process_capture_result(mCallbackOps, result);
4514 }
4515 }
4516}
4517
4518/*===========================================================================
4519 * FUNCTION : orchestrateNotify
4520 *
4521 * DESCRIPTION: Orchestrates a notify to camera service
4522 *
4523 * PARAMETERS :
4524 * @notify_msg : notify message to be sent to camera service
4525 *
4526 * RETURN :
4527 *
4528 *==========================================================================*/
4529void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4530{
4531 uint32_t frameworkFrameNumber;
4532 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004533 int32_t rc = NO_ERROR;
4534
4535 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004536 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004537
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004538 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004539 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4540 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4541 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004542 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004543 LOGE("Cannot find translated frameworkFrameNumber");
4544 assert(0);
4545 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004546 }
4547 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004548
4549 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4550 LOGD("Internal Request drop the notifyCb");
4551 } else {
4552 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4553 mCallbackOps->notify(mCallbackOps, notify_msg);
4554 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004555}
4556
4557/*===========================================================================
4558 * FUNCTION : FrameNumberRegistry
4559 *
4560 * DESCRIPTION: Constructor
4561 *
4562 * PARAMETERS :
4563 *
4564 * RETURN :
4565 *
4566 *==========================================================================*/
4567FrameNumberRegistry::FrameNumberRegistry()
4568{
4569 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4570}
4571
4572/*===========================================================================
4573 * FUNCTION : ~FrameNumberRegistry
4574 *
4575 * DESCRIPTION: Destructor
4576 *
4577 * PARAMETERS :
4578 *
4579 * RETURN :
4580 *
4581 *==========================================================================*/
4582FrameNumberRegistry::~FrameNumberRegistry()
4583{
4584}
4585
4586/*===========================================================================
4587 * FUNCTION : PurgeOldEntriesLocked
4588 *
4589 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4590 *
4591 * PARAMETERS :
4592 *
4593 * RETURN : NONE
4594 *
4595 *==========================================================================*/
4596void FrameNumberRegistry::purgeOldEntriesLocked()
4597{
4598 while (_register.begin() != _register.end()) {
4599 auto itr = _register.begin();
4600 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4601 _register.erase(itr);
4602 } else {
4603 return;
4604 }
4605 }
4606}
4607
4608/*===========================================================================
4609 * FUNCTION : allocStoreInternalFrameNumber
4610 *
4611 * DESCRIPTION: Method to record a framework request and associate a newly
4612 * generated internal frame number with it
4613 *
4614 * PARAMETERS :
4615 * @fFrameNumber: Identifier given by framework
4616 * @internalFN : Output parameter which will have the newly generated internal
4617 * entry
4618 *
4619 * RETURN : Error code
4620 *
4621 *==========================================================================*/
4622int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4623 uint32_t &internalFrameNumber)
4624{
4625 Mutex::Autolock lock(mRegistryLock);
4626 internalFrameNumber = _nextFreeInternalNumber++;
4627 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4628 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4629 purgeOldEntriesLocked();
4630 return NO_ERROR;
4631}
4632
4633/*===========================================================================
4634 * FUNCTION : generateStoreInternalFrameNumber
4635 *
4636 * DESCRIPTION: Method to generate a new internal request number that is not
4637 * associated with any framework request
4638 *
4639 * PARAMETERS :
4640 * @internalFrame#: Output parameter which will have the newly generated
4641 * internal frame number
4642 *
4643 * RETURN : Error code
4644 *
4645 *==========================================================================*/
4646int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4647{
4648 Mutex::Autolock lock(mRegistryLock);
4649 internalFrameNumber = _nextFreeInternalNumber++;
4650 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4651 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4652 purgeOldEntriesLocked();
4653 return NO_ERROR;
4654}
4655
4656/*===========================================================================
4657 * FUNCTION : getFrameworkFrameNumber
4658 *
4659 * DESCRIPTION: Method to query the framework framenumber given an internal #
4660 * DESCRIPTION: Method to query the framework frame number given an internal one
4661 * PARAMETERS :
4662 * @internalFrame#: Internal reference
4663 * @frameworkframenumber: Output parameter holding framework frame entry
4664 *
4665 * RETURN : Error code
4666 *
4667 *==========================================================================*/
4668int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4669 uint32_t &frameworkFrameNumber)
4670{
4671 Mutex::Autolock lock(mRegistryLock);
4672 auto itr = _register.find(internalFrameNumber);
4673 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004674 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004675 return -ENOENT;
4676 }
4677
4678 frameworkFrameNumber = itr->second;
4679 purgeOldEntriesLocked();
4680 return NO_ERROR;
4681}
Thierry Strudel3d639192016-09-09 11:52:26 -07004682
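/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream at
 *              the given index of a channel: stream id, dimensions, format,
 *              per-plane stride/scanline, and padding derived from the frame
 *              length minus the sum of the plane sizes.
 *
 * PARAMETERS :
 *   @config : Output pbcamera stream configuration
 *   @pbStreamId : Stream id to assign in the configuration
 *   @pbStreamFormat : pbcamera image format of the stream
 *   @channel : Channel that owns the stream
 *   @streamIndex : Index of the stream within the channel
 *
 * RETURN : Error status codes
 *==========================================================================*/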
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004683status_t QCamera3HardwareInterface::fillPbStreamConfig(
4684 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4685 QCamera3Channel *channel, uint32_t streamIndex) {
4686 if (config == nullptr) {
4687 LOGE("%s: config is null", __FUNCTION__);
4688 return BAD_VALUE;
4689 }
4690
4691 if (channel == nullptr) {
4692 LOGE("%s: channel is null", __FUNCTION__);
4693 return BAD_VALUE;
4694 }
4695
4696 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4697 if (stream == nullptr) {
4698 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4699 return NAME_NOT_FOUND;
4700 }
4701
4702 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4703 if (streamInfo == nullptr) {
4704 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4705 return NAME_NOT_FOUND;
4706 }
4707
4708 config->id = pbStreamId;
4709 config->image.width = streamInfo->dim.width;
4710 config->image.height = streamInfo->dim.height;
4711 config->image.padding = 0;
4712 config->image.format = pbStreamFormat;
4713
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004714 uint32_t totalPlaneSize = 0;
4715
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004716 // Fill plane information.
4717 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4718 pbcamera::PlaneConfiguration plane;
4719 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4720 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4721 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004722
4723 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004724 }
4725
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004726 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004727 return OK;
4728}
4729
Thierry Strudel3d639192016-09-09 11:52:26 -07004730/*===========================================================================
4731 * FUNCTION : processCaptureRequest
4732 *
4733 * DESCRIPTION: process a capture request from camera service
4734 *
4735 * PARAMETERS :
4736 * @request : request from framework to process
4737 *
4738 * RETURN :
4739 *
4740 *==========================================================================*/
4741int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004742 camera3_capture_request_t *request,
4743 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004744{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004745 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004746 int rc = NO_ERROR;
4747 int32_t request_id;
4748 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 bool isVidBufRequested = false;
4750 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004751 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004752
4753 pthread_mutex_lock(&mMutex);
4754
4755 // Validate current state
4756 switch (mState) {
4757 case CONFIGURED:
4758 case STARTED:
4759 /* valid state */
4760 break;
4761
4762 case ERROR:
4763 pthread_mutex_unlock(&mMutex);
4764 handleCameraDeviceError();
4765 return -ENODEV;
4766
4767 default:
4768 LOGE("Invalid state %d", mState);
4769 pthread_mutex_unlock(&mMutex);
4770 return -ENODEV;
4771 }
4772
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004773 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004774 if (rc != NO_ERROR) {
4775 LOGE("incoming request is not valid");
4776 pthread_mutex_unlock(&mMutex);
4777 return rc;
4778 }
4779
4780 meta = request->settings;
4781
4782 // For first capture request, send capture intent, and
4783 // stream on all streams
4784 if (mState == CONFIGURED) {
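        // First capture request after configure_streams: push the session
        // parameters (capture intent, IS/EIS type, FPS range, tintless, CDS,
        // stream info) to the backend, initialize all channels and set up any
        // dual-camera bundle for this session.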
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004785 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004786 // send an unconfigure to the backend so that the isp
4787 // resources are deallocated
4788 if (!mFirstConfiguration) {
4789 cam_stream_size_info_t stream_config_info;
4790 int32_t hal_version = CAM_HAL_V3;
4791 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4792 stream_config_info.buffer_info.min_buffers =
4793 MIN_INFLIGHT_REQUESTS;
4794 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004795 m_bIs4KVideo ? 0 :
4796 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004797 clear_metadata_buffer(mParameters);
4798 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4799 CAM_INTF_PARM_HAL_VERSION, hal_version);
4800 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4801 CAM_INTF_META_STREAM_INFO, stream_config_info);
4802 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4803 mParameters);
4804 if (rc < 0) {
4805 LOGE("set_parms for unconfigure failed");
4806 pthread_mutex_unlock(&mMutex);
4807 return rc;
4808 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004809
Thierry Strudel3d639192016-09-09 11:52:26 -07004810 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004811 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004813 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004815 property_get("persist.camera.is_type", is_type_value, "4");
4816 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4817 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4818 property_get("persist.camera.is_type_preview", is_type_value, "4");
4819 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4820 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004821
4822 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4823 int32_t hal_version = CAM_HAL_V3;
4824 uint8_t captureIntent =
4825 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4826 mCaptureIntent = captureIntent;
4827 clear_metadata_buffer(mParameters);
4828 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4829 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4830 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004831 if (mFirstConfiguration) {
4832 // configure instant AEC
4833 // Instant AEC is a session based parameter and it is needed only
4834 // once per complete session after open camera.
4835 // i.e. This is set only once for the first capture request, after open camera.
4836 setInstantAEC(meta);
4837 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 uint8_t fwkVideoStabMode=0;
4839 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4840 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4841 }
4842
Xue Tuecac74e2017-04-17 13:58:15 -07004843         // Enable EIS only if the setprop allows it, the size is supported, an EIS IS type is selected, and the AV timer is not in use
4844 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004845 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 int32_t vsMode;
4847 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4848 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4849 rc = BAD_VALUE;
4850 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004851 LOGD("setEis %d", setEis);
4852 bool eis3Supported = false;
4853 size_t count = IS_TYPE_MAX;
4854 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4855 for (size_t i = 0; i < count; i++) {
4856 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4857 eis3Supported = true;
4858 break;
4859 }
4860 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004861
4862 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004863 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004864 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4865 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004866 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4867 is_type = isTypePreview;
4868 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4869 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4870 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004871 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004872 } else {
4873 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004875 } else {
4876 is_type = IS_TYPE_NONE;
4877 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004879 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004880 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4881 }
4882 }
4883
4884 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4885 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4886
Thierry Strudel54dc9782017-02-15 12:12:10 -08004887 //Disable tintless only if the property is set to 0
4888 memset(prop, 0, sizeof(prop));
4889 property_get("persist.camera.tintless.enable", prop, "1");
4890 int32_t tintless_value = atoi(prop);
4891
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4893 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004894
Thierry Strudel3d639192016-09-09 11:52:26 -07004895     //Disable CDS for HFR mode or when a video stream is configured.
4896 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4897 //after every configure_stream
4898 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4899 (m_bIsVideo)) {
4900 int32_t cds = CAM_CDS_MODE_OFF;
4901 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4902 CAM_INTF_PARM_CDS_MODE, cds))
4903 LOGE("Failed to disable CDS for HFR mode");
4904
4905 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004906
4907 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4908 uint8_t* use_av_timer = NULL;
4909
4910 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004911 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 use_av_timer = &m_debug_avtimer;
4913 }
4914 else{
4915 use_av_timer =
4916 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004917 if (use_av_timer) {
4918 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4919 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004920 }
4921
4922 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4923 rc = BAD_VALUE;
4924 }
4925 }
4926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 setMobicat();
4928
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004929 uint8_t nrMode = 0;
4930 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4931 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4932 }
4933
Thierry Strudel3d639192016-09-09 11:52:26 -07004934 /* Set fps and hfr mode while sending meta stream info so that sensor
4935 * can configure appropriate streaming mode */
4936 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4938 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4940 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004941 if (rc == NO_ERROR) {
4942 int32_t max_fps =
4943 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004944 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4946 }
4947 /* For HFR, more buffers are dequeued upfront to improve the performance */
4948 if (mBatchSize) {
4949 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4950 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4951 }
4952 }
4953 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004954 LOGE("setHalFpsRange failed");
4955 }
4956 }
4957 if (meta.exists(ANDROID_CONTROL_MODE)) {
4958 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4959 rc = extractSceneMode(meta, metaMode, mParameters);
4960 if (rc != NO_ERROR) {
4961 LOGE("extractSceneMode failed");
4962 }
4963 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004964 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004965
Thierry Strudel04e026f2016-10-10 11:27:36 -07004966 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4967 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4968 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4969 rc = setVideoHdrMode(mParameters, vhdr);
4970 if (rc != NO_ERROR) {
4971 LOGE("setVideoHDR is failed");
4972 }
4973 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004974
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004975 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004976 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004977 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004978 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4980 sensorModeFullFov)) {
4981 rc = BAD_VALUE;
4982 }
4983 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004984 //TODO: validate the arguments, HSV scenemode should have only the
4985 //advertised fps ranges
4986
4987 /*set the capture intent, hal version, tintless, stream info,
4988      *and DIS enable parameters to the backend*/
4989 LOGD("set_parms META_STREAM_INFO " );
4990 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004991 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4992 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004993 mStreamConfigInfo.type[i],
4994 mStreamConfigInfo.stream_sizes[i].width,
4995 mStreamConfigInfo.stream_sizes[i].height,
4996 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004997 mStreamConfigInfo.format[i],
4998 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004999 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005000
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5002 mParameters);
5003 if (rc < 0) {
5004 LOGE("set_parms failed for hal version, stream info");
5005 }
5006
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005007 cam_sensor_mode_info_t sensorModeInfo = {};
5008 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 if (rc != NO_ERROR) {
5010 LOGE("Failed to get sensor output size");
5011 pthread_mutex_unlock(&mMutex);
5012 goto error_exit;
5013 }
5014
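        // Refresh the crop region mapper so it can translate between the full
        // active pixel array from the static metadata and the active array of
        // the sensor mode that was just selected.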
5015 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5016 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005017 sensorModeInfo.active_array_size.width,
5018 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005019
5020 /* Set batchmode before initializing channel. Since registerBuffer
5021 * internally initializes some of the channels, better set batchmode
5022 * even before first register buffer */
5023 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5024 it != mStreamInfo.end(); it++) {
5025 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5026 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5027 && mBatchSize) {
5028 rc = channel->setBatchSize(mBatchSize);
5029 //Disable per frame map unmap for HFR/batchmode case
5030 rc |= channel->setPerFrameMapUnmap(false);
5031 if (NO_ERROR != rc) {
5032 LOGE("Channel init failed %d", rc);
5033 pthread_mutex_unlock(&mMutex);
5034 goto error_exit;
5035 }
5036 }
5037 }
5038
5039 //First initialize all streams
5040 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5041 it != mStreamInfo.end(); it++) {
5042 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005043
5044 /* Initial value of NR mode is needed before stream on */
5045 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5047 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005048 setEis) {
5049 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5050 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5051 is_type = mStreamConfigInfo.is_type[i];
5052 break;
5053 }
5054 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005055 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005057 rc = channel->initialize(IS_TYPE_NONE);
5058 }
5059 if (NO_ERROR != rc) {
5060 LOGE("Channel initialization failed %d", rc);
5061 pthread_mutex_unlock(&mMutex);
5062 goto error_exit;
5063 }
5064 }
5065
5066 if (mRawDumpChannel) {
5067 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5068 if (rc != NO_ERROR) {
5069 LOGE("Error: Raw Dump Channel init failed");
5070 pthread_mutex_unlock(&mMutex);
5071 goto error_exit;
5072 }
5073 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005074 if (mHdrPlusRawSrcChannel) {
5075 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5076 if (rc != NO_ERROR) {
5077 LOGE("Error: HDR+ RAW Source Channel init failed");
5078 pthread_mutex_unlock(&mMutex);
5079 goto error_exit;
5080 }
5081 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005082 if (mSupportChannel) {
5083 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5084 if (rc < 0) {
5085 LOGE("Support channel initialization failed");
5086 pthread_mutex_unlock(&mMutex);
5087 goto error_exit;
5088 }
5089 }
5090 if (mAnalysisChannel) {
5091 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5092 if (rc < 0) {
5093 LOGE("Analysis channel initialization failed");
5094 pthread_mutex_unlock(&mMutex);
5095 goto error_exit;
5096 }
5097 }
5098 if (mDummyBatchChannel) {
5099 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5100 if (rc < 0) {
5101 LOGE("mDummyBatchChannel setBatchSize failed");
5102 pthread_mutex_unlock(&mMutex);
5103 goto error_exit;
5104 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005105 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 if (rc < 0) {
5107 LOGE("mDummyBatchChannel initialization failed");
5108 pthread_mutex_unlock(&mMutex);
5109 goto error_exit;
5110 }
5111 }
5112
5113 // Set bundle info
5114 rc = setBundleInfo();
5115 if (rc < 0) {
5116 LOGE("setBundleInfo failed %d", rc);
5117 pthread_mutex_unlock(&mMutex);
5118 goto error_exit;
5119 }
5120
5121 //update settings from app here
5122 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5123 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5124 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5125 }
5126 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5127 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5128 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5129 }
5130 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5131 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5132 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5133
5134 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5135 (mLinkedCameraId != mCameraId) ) {
5136 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5137 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005138 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005139 goto error_exit;
5140 }
5141 }
5142
5143 // add bundle related cameras
5144 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5145 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005146 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5147 &m_pDualCamCmdPtr->bundle_info;
5148 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005149 if (mIsDeviceLinked)
5150 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5151 else
5152 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5153
5154 pthread_mutex_lock(&gCamLock);
5155
5156 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5157 LOGE("Dualcam: Invalid Session Id ");
5158 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005159 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005160 goto error_exit;
5161 }
5162
5163 if (mIsMainCamera == 1) {
5164 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5165 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005166 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005167 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 // related session id should be session id of linked session
5169 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5170 } else {
5171 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5172 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005173 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005174 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5176 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005177 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005178 pthread_mutex_unlock(&gCamLock);
5179
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005180 rc = mCameraHandle->ops->set_dual_cam_cmd(
5181 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005182 if (rc < 0) {
5183 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005184 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005185 goto error_exit;
5186 }
5187 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto no_error;
5189error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005190 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 return rc;
5192no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 mWokenUpByDaemon = false;
5194 mPendingLiveRequest = 0;
5195 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005196 }
5197
5198 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005199 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200
5201 if (mFlushPerf) {
5202 //we cannot accept any requests during flush
5203 LOGE("process_capture_request cannot proceed during flush");
5204 pthread_mutex_unlock(&mMutex);
5205 return NO_ERROR; //should return an error
5206 }
5207
5208 if (meta.exists(ANDROID_REQUEST_ID)) {
5209 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5210 mCurrentRequestId = request_id;
5211 LOGD("Received request with id: %d", request_id);
5212 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5213 LOGE("Unable to find request id field, \
5214 & no previous id available");
5215 pthread_mutex_unlock(&mMutex);
5216 return NAME_NOT_FOUND;
5217 } else {
5218 LOGD("Re-using old request id");
5219 request_id = mCurrentRequestId;
5220 }
5221
5222 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5223 request->num_output_buffers,
5224 request->input_buffer,
5225 frameNumber);
5226 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005227 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005229 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005230 uint32_t snapshotStreamId = 0;
5231 for (size_t i = 0; i < request->num_output_buffers; i++) {
5232 const camera3_stream_buffer_t& output = request->output_buffers[i];
5233 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5234
Emilian Peev7650c122017-01-19 08:24:33 -08005235 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5236 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005237 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005238 blob_request = 1;
5239 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5240 }
5241
5242 if (output.acquire_fence != -1) {
5243 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5244 close(output.acquire_fence);
5245 if (rc != OK) {
5246 LOGE("sync wait failed %d", rc);
5247 pthread_mutex_unlock(&mMutex);
5248 return rc;
5249 }
5250 }
5251
Emilian Peev0f3c3162017-03-15 12:57:46 +00005252 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5253 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005254 depthRequestPresent = true;
5255 continue;
5256 }
5257
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005258 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005259 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005260
5261 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5262 isVidBufRequested = true;
5263 }
5264 }
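    // At this point streamsArray lists the backend stream IDs for every framework
    // output buffer in this request except depth blobs, which are skipped here and
    // handled separately through the depth channel further below.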
5265
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005266    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5267 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5268 itr++) {
5269 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5270 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5271 channel->getStreamID(channel->getStreamTypeMask());
5272
5273 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5274 isVidBufRequested = true;
5275 }
5276 }
5277
Thierry Strudel3d639192016-09-09 11:52:26 -07005278 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005279 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005280 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005281 }
5282 if (blob_request && mRawDumpChannel) {
5283 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005284 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005286 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 }
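    // Note: the internal RAW dump stream is appended to this frame's stream list
    // whenever a snapshot (blob) is requested; CAM_FREERUN_IDX is used, presumably
    // because no specific framework buffer index is tied to this internal stream.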
5288
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005289 {
5290 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5291 // Request a RAW buffer if
5292 // 1. mHdrPlusRawSrcChannel is valid.
5293 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5294 // 3. There is no pending HDR+ request.
5295 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5296 mHdrPlusPendingRequests.size() == 0) {
5297 streamsArray.stream_request[streamsArray.num_streams].streamID =
5298 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5299 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5300 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005301 }
5302
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005303 //extract capture intent
5304 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5305 mCaptureIntent =
5306 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5307 }
5308
5309 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5310 mCacMode =
5311 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5312 }
5313
5314 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005315 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005316
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005317 {
5318 Mutex::Autolock l(gHdrPlusClientLock);
5319 // If this request has a still capture intent, try to submit an HDR+ request.
5320 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5321 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5322 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5323 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005324 }
5325
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005326 if (hdrPlusRequest) {
5327 // For a HDR+ request, just set the frame parameters.
5328 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5329 if (rc < 0) {
5330 LOGE("fail to set frame parameters");
5331 pthread_mutex_unlock(&mMutex);
5332 return rc;
5333 }
5334 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005335 /* Parse the settings:
5336 * - For every request in NORMAL MODE
5337 * - For every request in HFR mode during preview only case
5338 * - For first request of every batch in HFR mode during video
 5339          *   recording. In batch mode the same settings, except the frame
 5340          *   number, are repeated in each request of the batch.
5341 */
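        /* Example: with mBatchSize = 8 and video buffers requested, the block
         * below runs setFrameParameters() only for the first request of each
         * 8-frame batch (mToBeQueuedVidBufs == 0); the remaining requests in the
         * batch only update the frame number further down. */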
5342 if (!mBatchSize ||
5343 (mBatchSize && !isVidBufRequested) ||
5344 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005345 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005346 if (rc < 0) {
5347 LOGE("fail to set frame parameters");
5348 pthread_mutex_unlock(&mMutex);
5349 return rc;
5350 }
5351 }
 5352        /* For batchMode HFR, setFrameParameters is not called for every
 5353         * request; only the frame number of the latest request is parsed.
 5354         * Keep track of the first and last frame numbers in a batch so that
 5355         * metadata for the frame numbers of the batch can be duplicated in
 5356         * handleBatchMetadata */
5357 if (mBatchSize) {
5358 if (!mToBeQueuedVidBufs) {
5359 //start of the batch
5360 mFirstFrameNumberInBatch = request->frame_number;
5361 }
5362 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5363 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5364 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005365 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 return BAD_VALUE;
5367 }
5368 }
5369 if (mNeedSensorRestart) {
5370 /* Unlock the mutex as restartSensor waits on the channels to be
5371 * stopped, which in turn calls stream callback functions -
5372 * handleBufferWithLock and handleMetadataWithLock */
5373 pthread_mutex_unlock(&mMutex);
5374 rc = dynamicUpdateMetaStreamInfo();
5375 if (rc != NO_ERROR) {
5376 LOGE("Restarting the sensor failed");
5377 return BAD_VALUE;
5378 }
5379 mNeedSensorRestart = false;
5380 pthread_mutex_lock(&mMutex);
5381 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005382 if(mResetInstantAEC) {
5383 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5384 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5385 mResetInstantAEC = false;
5386 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005387 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005388 if (request->input_buffer->acquire_fence != -1) {
5389 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5390 close(request->input_buffer->acquire_fence);
5391 if (rc != OK) {
5392 LOGE("input buffer sync wait failed %d", rc);
5393 pthread_mutex_unlock(&mMutex);
5394 return rc;
5395 }
5396 }
5397 }
5398
5399 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5400 mLastCustIntentFrmNum = frameNumber;
5401 }
5402 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005403 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 pendingRequestIterator latestRequest;
5405 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005406 pendingRequest.num_buffers = depthRequestPresent ?
5407 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005408 pendingRequest.request_id = request_id;
5409 pendingRequest.blob_request = blob_request;
5410 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 if (request->input_buffer) {
5412 pendingRequest.input_buffer =
5413 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5414 *(pendingRequest.input_buffer) = *(request->input_buffer);
5415 pInputBuffer = pendingRequest.input_buffer;
5416 } else {
5417 pendingRequest.input_buffer = NULL;
5418 pInputBuffer = NULL;
5419 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005420 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005421
5422 pendingRequest.pipeline_depth = 0;
5423 pendingRequest.partial_result_cnt = 0;
5424 extractJpegMetadata(mCurJpegMeta, request);
5425 pendingRequest.jpegMetadata = mCurJpegMeta;
5426 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005427 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005428 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5429 mHybridAeEnable =
5430 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5431 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005432
5433 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5434 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005435 /* DevCamDebug metadata processCaptureRequest */
5436 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5437 mDevCamDebugMetaEnable =
5438 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5439 }
5440 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5441 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005442
5443 //extract CAC info
5444 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5445 mCacMode =
5446 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5447 }
5448 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005449 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005450
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005451 // extract enableZsl info
5452 if (gExposeEnableZslKey) {
5453 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5454 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5455 mZslEnabled = pendingRequest.enableZsl;
5456 } else {
5457 pendingRequest.enableZsl = mZslEnabled;
5458 }
5459 }
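    // The ZSL choice is sticky: if this request does not carry the tag, the value
    // from the most recent request that did (mZslEnabled) is reused.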
5460
Thierry Strudel3d639192016-09-09 11:52:26 -07005461 PendingBuffersInRequest bufsForCurRequest;
5462 bufsForCurRequest.frame_number = frameNumber;
5463 // Mark current timestamp for the new request
5464 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005465 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005466
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005467 if (hdrPlusRequest) {
5468 // Save settings for this request.
5469 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5470 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5471
5472 // Add to pending HDR+ request queue.
5473 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5474 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5475
5476 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5477 }
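    // A copy of the current parameter buffer is kept with the pending HDR+ request,
    // presumably so the HDR+ result can later be processed with the settings that
    // were in effect when this frame was submitted.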
5478
Thierry Strudel3d639192016-09-09 11:52:26 -07005479 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005480 if ((request->output_buffers[i].stream->data_space ==
5481 HAL_DATASPACE_DEPTH) &&
5482 (HAL_PIXEL_FORMAT_BLOB ==
5483 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005484 continue;
5485 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005486 RequestedBufferInfo requestedBuf;
5487 memset(&requestedBuf, 0, sizeof(requestedBuf));
5488 requestedBuf.stream = request->output_buffers[i].stream;
5489 requestedBuf.buffer = NULL;
5490 pendingRequest.buffers.push_back(requestedBuf);
5491
 5492        // Add the buffer handle to the pending buffers list
5493 PendingBufferInfo bufferInfo;
5494 bufferInfo.buffer = request->output_buffers[i].buffer;
5495 bufferInfo.stream = request->output_buffers[i].stream;
5496 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5497 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5498 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5499 frameNumber, bufferInfo.buffer,
5500 channel->getStreamTypeMask(), bufferInfo.stream->format);
5501 }
5502 // Add this request packet into mPendingBuffersMap
5503 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5504 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5505 mPendingBuffersMap.get_num_overall_buffers());
5506
5507 latestRequest = mPendingRequestsList.insert(
5508 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005509
5510 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5511 // for the frame number.
5512 mShutterDispatcher.expectShutter(frameNumber);
5513 for (size_t i = 0; i < request->num_output_buffers; i++) {
5514 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5515 }
5516
Thierry Strudel3d639192016-09-09 11:52:26 -07005517 if(mFlush) {
5518 LOGI("mFlush is true");
5519 pthread_mutex_unlock(&mMutex);
5520 return NO_ERROR;
5521 }
5522
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005523 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5524 // channel.
5525 if (!hdrPlusRequest) {
5526 int indexUsed;
5527 // Notify metadata channel we receive a request
5528 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005529
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005530 if(request->input_buffer != NULL){
5531 LOGD("Input request, frame_number %d", frameNumber);
5532 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5533 if (NO_ERROR != rc) {
5534 LOGE("fail to set reproc parameters");
5535 pthread_mutex_unlock(&mMutex);
5536 return rc;
5537 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005538 }
5539
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005540 // Call request on other streams
5541 uint32_t streams_need_metadata = 0;
5542 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5543 for (size_t i = 0; i < request->num_output_buffers; i++) {
5544 const camera3_stream_buffer_t& output = request->output_buffers[i];
5545 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5546
5547 if (channel == NULL) {
5548 LOGW("invalid channel pointer for stream");
5549 continue;
5550 }
5551
5552 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5553 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5554 output.buffer, request->input_buffer, frameNumber);
5555 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005556 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5558 if (rc < 0) {
5559 LOGE("Fail to request on picture channel");
5560 pthread_mutex_unlock(&mMutex);
5561 return rc;
5562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005564 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5565 assert(NULL != mDepthChannel);
5566 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005567
Emilian Peev7650c122017-01-19 08:24:33 -08005568 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5569 if (rc < 0) {
5570 LOGE("Fail to map on depth buffer");
5571 pthread_mutex_unlock(&mMutex);
5572 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573 }
Emilian Peev7650c122017-01-19 08:24:33 -08005574 } else {
5575 LOGD("snapshot request with buffer %p, frame_number %d",
5576 output.buffer, frameNumber);
5577 if (!request->settings) {
5578 rc = channel->request(output.buffer, frameNumber,
5579 NULL, mPrevParameters, indexUsed);
5580 } else {
5581 rc = channel->request(output.buffer, frameNumber,
5582 NULL, mParameters, indexUsed);
5583 }
5584 if (rc < 0) {
5585 LOGE("Fail to request on picture channel");
5586 pthread_mutex_unlock(&mMutex);
5587 return rc;
5588 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589
Emilian Peev7650c122017-01-19 08:24:33 -08005590 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5591 uint32_t j = 0;
5592 for (j = 0; j < streamsArray.num_streams; j++) {
5593 if (streamsArray.stream_request[j].streamID == streamId) {
5594 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5595 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5596 else
5597 streamsArray.stream_request[j].buf_index = indexUsed;
5598 break;
5599 }
5600 }
5601 if (j == streamsArray.num_streams) {
5602 LOGE("Did not find matching stream to update index");
5603 assert(0);
5604 }
5605
5606 pendingBufferIter->need_metadata = true;
5607 streams_need_metadata++;
5608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005609 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005610 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5611 bool needMetadata = false;
5612 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5613 rc = yuvChannel->request(output.buffer, frameNumber,
5614 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5615 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005616 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005617 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005618 pthread_mutex_unlock(&mMutex);
5619 return rc;
5620 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005621
5622 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5623 uint32_t j = 0;
5624 for (j = 0; j < streamsArray.num_streams; j++) {
5625 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005626 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5627 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5628 else
5629 streamsArray.stream_request[j].buf_index = indexUsed;
5630 break;
5631 }
5632 }
5633 if (j == streamsArray.num_streams) {
5634 LOGE("Did not find matching stream to update index");
5635 assert(0);
5636 }
5637
5638 pendingBufferIter->need_metadata = needMetadata;
5639 if (needMetadata)
5640 streams_need_metadata += 1;
5641 LOGD("calling YUV channel request, need_metadata is %d",
5642 needMetadata);
5643 } else {
5644 LOGD("request with buffer %p, frame_number %d",
5645 output.buffer, frameNumber);
5646
5647 rc = channel->request(output.buffer, frameNumber, indexUsed);
5648
5649 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5650 uint32_t j = 0;
5651 for (j = 0; j < streamsArray.num_streams; j++) {
5652 if (streamsArray.stream_request[j].streamID == streamId) {
5653 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5654 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5655 else
5656 streamsArray.stream_request[j].buf_index = indexUsed;
5657 break;
5658 }
5659 }
5660 if (j == streamsArray.num_streams) {
5661 LOGE("Did not find matching stream to update index");
5662 assert(0);
5663 }
5664
5665 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5666 && mBatchSize) {
5667 mToBeQueuedVidBufs++;
5668 if (mToBeQueuedVidBufs == mBatchSize) {
5669 channel->queueBatchBuf();
5670 }
5671 }
5672 if (rc < 0) {
5673 LOGE("request failed");
5674 pthread_mutex_unlock(&mMutex);
5675 return rc;
5676 }
5677 }
5678 pendingBufferIter++;
5679 }
5680
5681 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5682 itr++) {
5683 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5684
5685 if (channel == NULL) {
5686 LOGE("invalid channel pointer for stream");
5687 assert(0);
5688 return BAD_VALUE;
5689 }
5690
5691 InternalRequest requestedStream;
5692 requestedStream = (*itr);
5693
5694
5695 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5696 LOGD("snapshot request internally input buffer %p, frame_number %d",
5697 request->input_buffer, frameNumber);
5698 if(request->input_buffer != NULL){
5699 rc = channel->request(NULL, frameNumber,
5700 pInputBuffer, &mReprocMeta, indexUsed, true,
5701 requestedStream.meteringOnly);
5702 if (rc < 0) {
5703 LOGE("Fail to request on picture channel");
5704 pthread_mutex_unlock(&mMutex);
5705 return rc;
5706 }
5707 } else {
5708 LOGD("snapshot request with frame_number %d", frameNumber);
5709 if (!request->settings) {
5710 rc = channel->request(NULL, frameNumber,
5711 NULL, mPrevParameters, indexUsed, true,
5712 requestedStream.meteringOnly);
5713 } else {
5714 rc = channel->request(NULL, frameNumber,
5715 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5716 }
5717 if (rc < 0) {
5718 LOGE("Fail to request on picture channel");
5719 pthread_mutex_unlock(&mMutex);
5720 return rc;
5721 }
5722
5723 if ((*itr).meteringOnly != 1) {
5724 requestedStream.need_metadata = 1;
5725 streams_need_metadata++;
5726 }
5727 }
5728
5729 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5730 uint32_t j = 0;
5731 for (j = 0; j < streamsArray.num_streams; j++) {
5732 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005733 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5734 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5735 else
5736 streamsArray.stream_request[j].buf_index = indexUsed;
5737 break;
5738 }
5739 }
5740 if (j == streamsArray.num_streams) {
5741 LOGE("Did not find matching stream to update index");
5742 assert(0);
5743 }
5744
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005745 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005746 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005747 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005748 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005749 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005750 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005751 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005752
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005753 //If 2 streams have need_metadata set to true, fail the request, unless
5754 //we copy/reference count the metadata buffer
5755 if (streams_need_metadata > 1) {
5756 LOGE("not supporting request in which two streams requires"
5757 " 2 HAL metadata for reprocessing");
5758 pthread_mutex_unlock(&mMutex);
5759 return -EINVAL;
5760 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005761
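        // PDAF data is only enabled in the backend when this request actually
        // carries a depth (HAL_DATASPACE_DEPTH) blob buffer.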
Emilian Peev7650c122017-01-19 08:24:33 -08005762 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5764 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5765 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5766 pthread_mutex_unlock(&mMutex);
5767 return BAD_VALUE;
5768 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005769 if (request->input_buffer == NULL) {
5770 /* Set the parameters to backend:
5771 * - For every request in NORMAL MODE
5772 * - For every request in HFR mode during preview only case
5773 * - Once every batch in HFR mode during video recording
5774 */
5775 if (!mBatchSize ||
5776 (mBatchSize && !isVidBufRequested) ||
5777 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5778 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5779 mBatchSize, isVidBufRequested,
5780 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005781
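                // HFR batch mode: stream requests from every request in the batch
                // are merged into mBatchedStreamsArray so that the single set_parms
                // call issued at the end of the batch covers all streams touched by
                // any request in that batch.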
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5783 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5784 uint32_t m = 0;
5785 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5786 if (streamsArray.stream_request[k].streamID ==
5787 mBatchedStreamsArray.stream_request[m].streamID)
5788 break;
5789 }
5790 if (m == mBatchedStreamsArray.num_streams) {
5791 mBatchedStreamsArray.stream_request\
5792 [mBatchedStreamsArray.num_streams].streamID =
5793 streamsArray.stream_request[k].streamID;
5794 mBatchedStreamsArray.stream_request\
5795 [mBatchedStreamsArray.num_streams].buf_index =
5796 streamsArray.stream_request[k].buf_index;
5797 mBatchedStreamsArray.num_streams =
5798 mBatchedStreamsArray.num_streams + 1;
5799 }
5800 }
5801 streamsArray = mBatchedStreamsArray;
5802 }
5803 /* Update stream id of all the requested buffers */
5804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5805 streamsArray)) {
5806 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005807 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005808 return BAD_VALUE;
5809 }
5810
5811 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5812 mParameters);
5813 if (rc < 0) {
5814 LOGE("set_parms failed");
5815 }
 5816            /* reset to zero because the batch is queued */
5817 mToBeQueuedVidBufs = 0;
5818 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5819 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5820 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005821 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5822 uint32_t m = 0;
5823 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5824 if (streamsArray.stream_request[k].streamID ==
5825 mBatchedStreamsArray.stream_request[m].streamID)
5826 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005827 }
5828 if (m == mBatchedStreamsArray.num_streams) {
5829 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5830 streamID = streamsArray.stream_request[k].streamID;
5831 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5832 buf_index = streamsArray.stream_request[k].buf_index;
5833 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5834 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005835 }
5836 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005837 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005838
5839 // Start all streams after the first setting is sent, so that the
5840 // setting can be applied sooner: (0 + apply_delay)th frame.
5841 if (mState == CONFIGURED && mChannelHandle) {
5842 //Then start them.
5843 LOGH("Start META Channel");
5844 rc = mMetadataChannel->start();
5845 if (rc < 0) {
5846 LOGE("META channel start failed");
5847 pthread_mutex_unlock(&mMutex);
5848 return rc;
5849 }
5850
5851 if (mAnalysisChannel) {
5852 rc = mAnalysisChannel->start();
5853 if (rc < 0) {
5854 LOGE("Analysis channel start failed");
5855 mMetadataChannel->stop();
5856 pthread_mutex_unlock(&mMutex);
5857 return rc;
5858 }
5859 }
5860
5861 if (mSupportChannel) {
5862 rc = mSupportChannel->start();
5863 if (rc < 0) {
5864 LOGE("Support channel start failed");
5865 mMetadataChannel->stop();
 5866                    /* Although support and analysis are mutually exclusive today,
 5867                       adding it in any case for future proofing */
5868 if (mAnalysisChannel) {
5869 mAnalysisChannel->stop();
5870 }
5871 pthread_mutex_unlock(&mMutex);
5872 return rc;
5873 }
5874 }
5875 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5876 it != mStreamInfo.end(); it++) {
5877 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5878 LOGH("Start Processing Channel mask=%d",
5879 channel->getStreamTypeMask());
5880 rc = channel->start();
5881 if (rc < 0) {
5882 LOGE("channel start failed");
5883 pthread_mutex_unlock(&mMutex);
5884 return rc;
5885 }
5886 }
5887
5888 if (mRawDumpChannel) {
5889 LOGD("Starting raw dump stream");
5890 rc = mRawDumpChannel->start();
5891 if (rc != NO_ERROR) {
5892 LOGE("Error Starting Raw Dump Channel");
5893 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5894 it != mStreamInfo.end(); it++) {
5895 QCamera3Channel *channel =
5896 (QCamera3Channel *)(*it)->stream->priv;
5897 LOGH("Stopping Processing Channel mask=%d",
5898 channel->getStreamTypeMask());
5899 channel->stop();
5900 }
5901 if (mSupportChannel)
5902 mSupportChannel->stop();
5903 if (mAnalysisChannel) {
5904 mAnalysisChannel->stop();
5905 }
5906 mMetadataChannel->stop();
5907 pthread_mutex_unlock(&mMutex);
5908 return rc;
5909 }
5910 }
5911
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005912 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005913 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005914 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005915 if (rc != NO_ERROR) {
5916 LOGE("start_channel failed %d", rc);
5917 pthread_mutex_unlock(&mMutex);
5918 return rc;
5919 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005920
5921 {
5922 // Configure Easel for stream on.
5923 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005924
5925 // Now that sensor mode should have been selected, get the selected sensor mode
5926 // info.
5927 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5928 getCurrentSensorModeInfo(mSensorModeInfo);
5929
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005930 if (EaselManagerClientOpened) {
5931 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005932 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5933 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005934 if (rc != OK) {
5935 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5936 mCameraId, mSensorModeInfo.op_pixel_clk);
5937 pthread_mutex_unlock(&mMutex);
5938 return rc;
5939 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005940 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005941 }
5942 }
5943
5944 // Start sensor streaming.
5945 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5946 mChannelHandle);
5947 if (rc != NO_ERROR) {
5948 LOGE("start_sensor_stream_on failed %d", rc);
5949 pthread_mutex_unlock(&mMutex);
5950 return rc;
5951 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005952 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005953 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005954 }
5955
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005956 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005957 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005958 Mutex::Autolock l(gHdrPlusClientLock);
5959 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5960 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5961 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5962 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5963 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5964 rc = enableHdrPlusModeLocked();
5965 if (rc != OK) {
5966 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5967 pthread_mutex_unlock(&mMutex);
5968 return rc;
5969 }
5970
5971 mFirstPreviewIntentSeen = true;
5972 }
5973 }
5974
Thierry Strudel3d639192016-09-09 11:52:26 -07005975 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5976
5977 mState = STARTED;
5978 // Added a timed condition wait
5979 struct timespec ts;
5980 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005981 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005982 if (rc < 0) {
5983 isValidTimeout = 0;
 5984        LOGE("Error reading the monotonic clock!!");
5985 }
5986 else {
 5987        // Default to a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005988 int64_t timeout = 5;
5989 {
5990 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5991 // If there is a pending HDR+ request, the following requests may be blocked until the
5992 // HDR+ request is done. So allow a longer timeout.
5993 if (mHdrPlusPendingRequests.size() > 0) {
5994 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5995 }
5996 }
5997 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005998 }
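    // Note: the deadline computed above is absolute; CLOCK_MONOTONIC is used here,
    // which assumes the condition variable (see cam_cond.h) is initialized to do
    // timed waits against the monotonic clock.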
5999 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006000 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006001 (mState != ERROR) && (mState != DEINIT)) {
6002 if (!isValidTimeout) {
6003 LOGD("Blocking on conditional wait");
6004 pthread_cond_wait(&mRequestCond, &mMutex);
6005 }
6006 else {
6007 LOGD("Blocking on timed conditional wait");
6008 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6009 if (rc == ETIMEDOUT) {
6010 rc = -ENODEV;
6011 LOGE("Unblocked on timeout!!!!");
6012 break;
6013 }
6014 }
6015 LOGD("Unblocked");
6016 if (mWokenUpByDaemon) {
6017 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006018 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006019 break;
6020 }
6021 }
6022 pthread_mutex_unlock(&mMutex);
6023
6024 return rc;
6025}
6026
6027/*===========================================================================
6028 * FUNCTION : dump
6029 *
 6030 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and
 6031 *              pending frame drops) to the given file descriptor
 6032 *
 6033 * PARAMETERS :
 6034 *   @fd : file descriptor to write the dump to
 6035 * RETURN : None
6036 *==========================================================================*/
6037void QCamera3HardwareInterface::dump(int fd)
6038{
6039 pthread_mutex_lock(&mMutex);
6040 dprintf(fd, "\n Camera HAL3 information Begin \n");
6041
6042 dprintf(fd, "\nNumber of pending requests: %zu \n",
6043 mPendingRequestsList.size());
6044 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6045 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6046 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6047 for(pendingRequestIterator i = mPendingRequestsList.begin();
6048 i != mPendingRequestsList.end(); i++) {
6049 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6050 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6051 i->input_buffer);
6052 }
6053 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6054 mPendingBuffersMap.get_num_overall_buffers());
6055 dprintf(fd, "-------+------------------\n");
6056 dprintf(fd, " Frame | Stream type mask \n");
6057 dprintf(fd, "-------+------------------\n");
6058 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6059 for(auto &j : req.mPendingBufferList) {
6060 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6061 dprintf(fd, " %5d | %11d \n",
6062 req.frame_number, channel->getStreamTypeMask());
6063 }
6064 }
6065 dprintf(fd, "-------+------------------\n");
6066
6067 dprintf(fd, "\nPending frame drop list: %zu\n",
6068 mPendingFrameDropList.size());
6069 dprintf(fd, "-------+-----------\n");
6070 dprintf(fd, " Frame | Stream ID \n");
6071 dprintf(fd, "-------+-----------\n");
6072 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6073 i != mPendingFrameDropList.end(); i++) {
6074 dprintf(fd, " %5d | %9d \n",
6075 i->frame_number, i->stream_ID);
6076 }
6077 dprintf(fd, "-------+-----------\n");
6078
6079 dprintf(fd, "\n Camera HAL3 information End \n");
6080
6081 /* use dumpsys media.camera as trigger to send update debug level event */
6082 mUpdateDebugLevel = true;
6083 pthread_mutex_unlock(&mMutex);
6084 return;
6085}
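// Note: this dump path is typically exercised via "adb shell dumpsys media.camera",
// which is also what the mUpdateDebugLevel flag above keys off of.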
6086
6087/*===========================================================================
6088 * FUNCTION : flush
6089 *
6090 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6091 * conditionally restarts channels
6092 *
6093 * PARAMETERS :
6094 * @ restartChannels: re-start all channels
6095 *
6096 *
6097 * RETURN :
6098 * 0 on success
6099 * Error code on failure
6100 *==========================================================================*/
6101int QCamera3HardwareInterface::flush(bool restartChannels)
6102{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006103 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006104 int32_t rc = NO_ERROR;
6105
6106 LOGD("Unblocking Process Capture Request");
6107 pthread_mutex_lock(&mMutex);
6108 mFlush = true;
6109 pthread_mutex_unlock(&mMutex);
6110
6111 rc = stopAllChannels();
6112 // unlink of dualcam
6113 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006114 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6115 &m_pDualCamCmdPtr->bundle_info;
6116 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006117 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6118 pthread_mutex_lock(&gCamLock);
6119
6120 if (mIsMainCamera == 1) {
6121 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6122 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006123 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006124 // related session id should be session id of linked session
6125 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6126 } else {
6127 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6128 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006129 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006130 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6131 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006132 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006133 pthread_mutex_unlock(&gCamLock);
6134
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006135 rc = mCameraHandle->ops->set_dual_cam_cmd(
6136 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 if (rc < 0) {
6138 LOGE("Dualcam: Unlink failed, but still proceed to close");
6139 }
6140 }
6141
6142 if (rc < 0) {
6143 LOGE("stopAllChannels failed");
6144 return rc;
6145 }
6146 if (mChannelHandle) {
6147 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6148 mChannelHandle);
6149 }
6150
6151 // Reset bundle info
6152 rc = setBundleInfo();
6153 if (rc < 0) {
6154 LOGE("setBundleInfo failed %d", rc);
6155 return rc;
6156 }
6157
6158 // Mutex Lock
6159 pthread_mutex_lock(&mMutex);
6160
6161 // Unblock process_capture_request
6162 mPendingLiveRequest = 0;
6163 pthread_cond_signal(&mRequestCond);
6164
6165 rc = notifyErrorForPendingRequests();
6166 if (rc < 0) {
6167 LOGE("notifyErrorForPendingRequests failed");
6168 pthread_mutex_unlock(&mMutex);
6169 return rc;
6170 }
6171
6172 mFlush = false;
6173
6174 // Start the Streams/Channels
6175 if (restartChannels) {
6176 rc = startAllChannels();
6177 if (rc < 0) {
6178 LOGE("startAllChannels failed");
6179 pthread_mutex_unlock(&mMutex);
6180 return rc;
6181 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006182 if (mChannelHandle) {
6183 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006184 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006185 if (rc < 0) {
6186 LOGE("start_channel failed");
6187 pthread_mutex_unlock(&mMutex);
6188 return rc;
6189 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006190 }
6191 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006192 pthread_mutex_unlock(&mMutex);
6193
6194 return 0;
6195}
6196
6197/*===========================================================================
6198 * FUNCTION : flushPerf
6199 *
6200 * DESCRIPTION: This is the performance optimization version of flush that does
 6201 *              not use stream off; instead, it flushes the system
6202 *
6203 * PARAMETERS :
6204 *
6205 *
6206 * RETURN : 0 : success
6207 * -EINVAL: input is malformed (device is not valid)
6208 * -ENODEV: if the device has encountered a serious error
6209 *==========================================================================*/
6210int QCamera3HardwareInterface::flushPerf()
6211{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006212 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006213 int32_t rc = 0;
6214 struct timespec timeout;
6215 bool timed_wait = false;
6216
6217 pthread_mutex_lock(&mMutex);
6218 mFlushPerf = true;
6219 mPendingBuffersMap.numPendingBufsAtFlush =
6220 mPendingBuffersMap.get_num_overall_buffers();
6221 LOGD("Calling flush. Wait for %d buffers to return",
6222 mPendingBuffersMap.numPendingBufsAtFlush);
6223
6224 /* send the flush event to the backend */
6225 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6226 if (rc < 0) {
6227 LOGE("Error in flush: IOCTL failure");
6228 mFlushPerf = false;
6229 pthread_mutex_unlock(&mMutex);
6230 return -ENODEV;
6231 }
6232
6233 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6234 LOGD("No pending buffers in HAL, return flush");
6235 mFlushPerf = false;
6236 pthread_mutex_unlock(&mMutex);
6237 return rc;
6238 }
6239
6240 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006241 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006242 if (rc < 0) {
 6243        LOGE("Error reading the monotonic clock, cannot use timed wait");
6244 } else {
6245 timeout.tv_sec += FLUSH_TIMEOUT;
6246 timed_wait = true;
6247 }
6248
6249 //Block on conditional variable
6250 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6251 LOGD("Waiting on mBuffersCond");
6252 if (!timed_wait) {
6253 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6254 if (rc != 0) {
6255 LOGE("pthread_cond_wait failed due to rc = %s",
6256 strerror(rc));
6257 break;
6258 }
6259 } else {
6260 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6261 if (rc != 0) {
6262 LOGE("pthread_cond_timedwait failed due to rc = %s",
6263 strerror(rc));
6264 break;
6265 }
6266 }
6267 }
6268 if (rc != 0) {
6269 mFlushPerf = false;
6270 pthread_mutex_unlock(&mMutex);
6271 return -ENODEV;
6272 }
6273
6274 LOGD("Received buffers, now safe to return them");
6275
6276 //make sure the channels handle flush
6277 //currently only required for the picture channel to release snapshot resources
6278 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6279 it != mStreamInfo.end(); it++) {
6280 QCamera3Channel *channel = (*it)->channel;
6281 if (channel) {
6282 rc = channel->flush();
6283 if (rc) {
6284 LOGE("Flushing the channels failed with error %d", rc);
 6285                // Even though the channel flush failed, we need to continue and
 6286                // return the buffers we have to the framework; however, the
 6287                // return value will be an error
6288 rc = -ENODEV;
6289 }
6290 }
6291 }
6292
6293 /* notify the frameworks and send errored results */
6294 rc = notifyErrorForPendingRequests();
6295 if (rc < 0) {
6296 LOGE("notifyErrorForPendingRequests failed");
6297 pthread_mutex_unlock(&mMutex);
6298 return rc;
6299 }
6300
6301 //unblock process_capture_request
6302 mPendingLiveRequest = 0;
6303 unblockRequestIfNecessary();
6304
6305 mFlushPerf = false;
6306 pthread_mutex_unlock(&mMutex);
6307 LOGD ("Flush Operation complete. rc = %d", rc);
6308 return rc;
6309}
6310
6311/*===========================================================================
6312 * FUNCTION : handleCameraDeviceError
6313 *
6314 * DESCRIPTION: This function calls internal flush and notifies the error to
6315 * framework and updates the state variable.
6316 *
6317 * PARAMETERS : None
6318 *
6319 * RETURN : NO_ERROR on Success
6320 * Error code on failure
6321 *==========================================================================*/
6322int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6323{
6324 int32_t rc = NO_ERROR;
6325
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006326 {
6327 Mutex::Autolock lock(mFlushLock);
6328 pthread_mutex_lock(&mMutex);
6329 if (mState != ERROR) {
6330 //if mState != ERROR, nothing to be done
6331 pthread_mutex_unlock(&mMutex);
6332 return NO_ERROR;
6333 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006334 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006335
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006336 rc = flush(false /* restart channels */);
6337 if (NO_ERROR != rc) {
6338 LOGE("internal flush to handle mState = ERROR failed");
6339 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006340
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006341 pthread_mutex_lock(&mMutex);
6342 mState = DEINIT;
6343 pthread_mutex_unlock(&mMutex);
6344 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006345
6346 camera3_notify_msg_t notify_msg;
6347 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6348 notify_msg.type = CAMERA3_MSG_ERROR;
6349 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6350 notify_msg.message.error.error_stream = NULL;
6351 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006352 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006353
6354 return rc;
6355}
6356
6357/*===========================================================================
6358 * FUNCTION : captureResultCb
6359 *
6360 * DESCRIPTION: Callback handler for all capture result
6361 * (streams, as well as metadata)
6362 *
6363 * PARAMETERS :
6364 * @metadata : metadata information
 6365 *              NULL if metadata.
 * @frame_number : frame number this callback corresponds to
 * @isInputBuffer : true if this callback is for the request's input buffer
 6366 *
6367 *
6368 * RETURN : NONE
6369 *==========================================================================*/
6370void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6371 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6372{
6373 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006374 pthread_mutex_lock(&mMutex);
6375 uint8_t batchSize = mBatchSize;
6376 pthread_mutex_unlock(&mMutex);
6377 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006378 handleBatchMetadata(metadata_buf,
6379 true /* free_and_bufdone_meta_buf */);
6380 } else { /* mBatchSize = 0 */
6381 hdrPlusPerfLock(metadata_buf);
6382 pthread_mutex_lock(&mMutex);
6383 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006384 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006385 true /* last urgent frame of batch metadata */,
6386 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006387 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006388 pthread_mutex_unlock(&mMutex);
6389 }
6390 } else if (isInputBuffer) {
6391 pthread_mutex_lock(&mMutex);
6392 handleInputBufferWithLock(frame_number);
6393 pthread_mutex_unlock(&mMutex);
6394 } else {
6395 pthread_mutex_lock(&mMutex);
6396 handleBufferWithLock(buffer, frame_number);
6397 pthread_mutex_unlock(&mMutex);
6398 }
6399 return;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : getReprocessibleOutputStreamId
6404 *
6405 * DESCRIPTION: Get source output stream id for the input reprocess stream
6406 * based on size and format, which would be the largest
6407 * output stream if an input stream exists.
6408 *
6409 * PARAMETERS :
6410 * @id : return the stream id if found
6411 *
6412 * RETURN : int32_t type of status
6413 * NO_ERROR -- success
 6414 *              non-zero failure code
6415 *==========================================================================*/
6416int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6417{
 6418    /* check if there is any output or bidirectional stream with the same size
 6419       and format, and return that stream */
6420 if ((mInputStreamInfo.dim.width > 0) &&
6421 (mInputStreamInfo.dim.height > 0)) {
6422 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6423 it != mStreamInfo.end(); it++) {
6424
6425 camera3_stream_t *stream = (*it)->stream;
6426 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6427 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6428 (stream->format == mInputStreamInfo.format)) {
6429 // Usage flag for an input stream and the source output stream
6430 // may be different.
6431 LOGD("Found reprocessible output stream! %p", *it);
6432 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6433 stream->usage, mInputStreamInfo.usage);
6434
6435 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6436 if (channel != NULL && channel->mStreams[0]) {
6437 id = channel->mStreams[0]->getMyServerID();
6438 return NO_ERROR;
6439 }
6440 }
6441 }
6442 } else {
6443 LOGD("No input stream, so no reprocessible output stream");
6444 }
6445 return NAME_NOT_FOUND;
6446}
6447
6448/*===========================================================================
6449 * FUNCTION : lookupFwkName
6450 *
 6451 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6452 *              make sure the parameter is correctly propagated
6453 *
6454 * PARAMETERS :
6455 * @arr : map between the two enums
6456 * @len : len of the map
6457 * @hal_name : name of the hal_parm to map
6458 *
6459 * RETURN : int type of status
6460 * fwk_name -- success
 6461 *              non-zero failure code
6462 *==========================================================================*/
6463template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6464 size_t len, halType hal_name)
6465{
6466
6467 for (size_t i = 0; i < len; i++) {
6468 if (arr[i].hal_name == hal_name) {
6469 return arr[i].fwk_name;
6470 }
6471 }
6472
 6473    /* Not being able to find a matching framework type is not necessarily
 6474     * an error case. This happens when mm-camera supports more attributes
 6475     * than the framework does */
6476 LOGH("Cannot find matching framework type");
6477 return NAME_NOT_FOUND;
6478}
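// Illustrative usage of lookupFwkName (a sketch only; "Map" and "kExampleMap"
// below are hypothetical names, the real map tables are defined elsewhere in
// this file):
//
//     struct Map { int fwk_name; int hal_name; };
//     static const Map kExampleMap[] = { { 0, 10 }, { 1, 11 } };
//     int fwk = lookupFwkName(kExampleMap,
//             sizeof(kExampleMap) / sizeof(kExampleMap[0]), 11);  // returns 1
//     if (fwk == NAME_NOT_FOUND) {
//         // backend value has no framework equivalent; skip the tag
//     }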
6479
6480/*===========================================================================
6481 * FUNCTION : lookupHalName
6482 *
 6483 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6484 *              make sure the parameter is correctly propagated
6485 *
6486 * PARAMETERS :
6487 * @arr : map between the two enums
6488 * @len : len of the map
 6489 * @fwk_name : name of the framework parameter to map
6490 *
6491 * RETURN : int32_t type of status
6492 * hal_name -- success
 6493 *              non-zero failure code
6494 *==========================================================================*/
6495template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6496 size_t len, fwkType fwk_name)
6497{
6498 for (size_t i = 0; i < len; i++) {
6499 if (arr[i].fwk_name == fwk_name) {
6500 return arr[i].hal_name;
6501 }
6502 }
6503
6504 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6505 return NAME_NOT_FOUND;
6506}
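// Note: lookupHalName is the inverse of lookupFwkName above; a miss is logged
// with LOGE rather than LOGH, presumably because every framework value handed
// to the HAL is expected to have a backend equivalent.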
6507
6508/*===========================================================================
6509 * FUNCTION : lookupProp
6510 *
6511 * DESCRIPTION: lookup a value by its name
6512 *
6513 * PARAMETERS :
6514 * @arr : map between the two enums
6515 * @len : size of the map
6516 * @name : name to be looked up
6517 *
6518 * RETURN : Value if found
6519 * CAM_CDS_MODE_MAX if not found
6520 *==========================================================================*/
6521template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6522 size_t len, const char *name)
6523{
6524 if (name) {
6525 for (size_t i = 0; i < len; i++) {
6526 if (!strcmp(arr[i].desc, name)) {
6527 return arr[i].val;
6528 }
6529 }
6530 }
6531 return CAM_CDS_MODE_MAX;
6532}
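// Illustrative usage of lookupProp (a sketch only; "PropMap" and "kCdsMap" below
// are hypothetical names, the real property maps are defined elsewhere):
//
//     struct PropMap { const char *desc; cam_cds_mode_type_t val; };
//     static const PropMap kCdsMap[] =
//             { { "On", CAM_CDS_MODE_ON }, { "Off", CAM_CDS_MODE_OFF } };
//     cam_cds_mode_type_t mode = lookupProp(kCdsMap,
//             sizeof(kCdsMap) / sizeof(kCdsMap[0]), "Off");  // CAM_CDS_MODE_OFF
//     // returns CAM_CDS_MODE_MAX when the name does not match any entry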
6533
6534/*===========================================================================
6535 *
 6536 * FUNCTION   : translateFromHalMetadata
 *
 * DESCRIPTION: Translate metadata obtained from the camera backend into the
 *              framework camera_metadata_t format
6537 *
6538 * PARAMETERS :
6539 * @metadata : metadata information from callback
6540 * @timestamp: metadata buffer timestamp
6541 * @request_id: request id
6542 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006543 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006544 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6545 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006546 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006547 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6548 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006549 *
6550 * RETURN : camera_metadata_t*
6551 * metadata in a format specified by fwk
6552 *==========================================================================*/
6553camera_metadata_t*
6554QCamera3HardwareInterface::translateFromHalMetadata(
6555 metadata_buffer_t *metadata,
6556 nsecs_t timestamp,
6557 int32_t request_id,
6558 const CameraMetadata& jpegMetadata,
6559 uint8_t pipeline_depth,
6560 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006561 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006562 /* DevCamDebug metadata translateFromHalMetadata argument */
6563 uint8_t DevCamDebug_meta_enable,
6564 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006565 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006566 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006567 bool lastMetadataInBatch,
6568 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006569{
6570 CameraMetadata camMetadata;
6571 camera_metadata_t *resultMetadata;
6572
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006573 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006574 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6575 * Timestamp is needed because it's used for shutter notify calculation.
6576 * */
6577 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6578 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006579 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006580 }
6581
Thierry Strudel3d639192016-09-09 11:52:26 -07006582 if (jpegMetadata.entryCount())
6583 camMetadata.append(jpegMetadata);
6584
6585 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6586 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6587 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6588 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006589 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006590 if (mBatchSize == 0) {
6591 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6592 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006594
Samuel Ha68ba5172016-12-15 18:41:12 -08006595 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6596    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6597 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
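        // Each IF_META_AVAILABLE block below follows the same pattern: if the vendor debug
        // tag is present in the HAL metadata, copy its value into the corresponding
        // DEVCAMDEBUG_* framework tag.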
6598 // DevCamDebug metadata translateFromHalMetadata AF
6599 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6600 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6601 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6602 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6603 }
6604 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6605 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6606 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6607 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6610 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6611 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6612 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6613 }
6614 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6615 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6616 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6617 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6618 }
6619 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6620 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6621 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6622 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6623 }
6624 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6625 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6626 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6627 *DevCamDebug_af_monitor_pdaf_target_pos;
6628 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6629 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6632 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6633 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6634 *DevCamDebug_af_monitor_pdaf_confidence;
6635 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6636 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6637 }
6638 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6639 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6640 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6641 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6642 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6645 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6646 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6647 *DevCamDebug_af_monitor_tof_target_pos;
6648 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6649 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6652 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6653 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6654 *DevCamDebug_af_monitor_tof_confidence;
6655 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6656 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6657 }
6658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6659 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6660 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6661 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6662 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6665 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6666 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6667 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6668 &fwk_DevCamDebug_af_monitor_type_select, 1);
6669 }
6670 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6671 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6672 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6673 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6674 &fwk_DevCamDebug_af_monitor_refocus, 1);
6675 }
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6677 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6678 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6679 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6680 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6681 }
6682 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6683 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6684 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6685 *DevCamDebug_af_search_pdaf_target_pos;
6686 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6687 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6690 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6691 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6692 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6693 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6696 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6697 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6698 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6699 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6702 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6703 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6704 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6705 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6708 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6709 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6711 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6716 *DevCamDebug_af_search_tof_target_pos;
6717 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6718 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6721 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6722 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6723 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6724 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6727 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6728 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6729 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6730 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6733 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6734 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6735 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6736 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6739 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6740 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6742 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6745 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6746 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6747 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6748 &fwk_DevCamDebug_af_search_type_select, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6751 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6752 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6753 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6754 &fwk_DevCamDebug_af_search_next_pos, 1);
6755 }
6756 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6757 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6758 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6759 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6760 &fwk_DevCamDebug_af_search_target_pos, 1);
6761 }
6762 // DevCamDebug metadata translateFromHalMetadata AEC
6763 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6764 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6765 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6766 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6767 }
6768 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6769 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6770 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6771 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6774 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6775 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6776 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6779 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6780 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6781 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6782 }
6783 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6784 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6785 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6786 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6787 }
6788 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6789 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6790 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6791 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6792 }
6793 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6794 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6795 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6796 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6797 }
6798 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6799 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6800 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6801 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6802 }
Samuel Ha34229982017-02-17 13:51:11 -08006803 // DevCamDebug metadata translateFromHalMetadata zzHDR
6804 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6805 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6806 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6807 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6808 }
6809 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6810 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006811 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006812 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6813 }
6814 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6815 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6816 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6817 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6818 }
6819 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6820 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006821 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006822 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6823 }
6824 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6825 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6826 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6827 *DevCamDebug_aec_hdr_sensitivity_ratio;
6828 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6829 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6830 }
6831 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6832 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6833 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6834 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6835 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6836 }
6837 // DevCamDebug metadata translateFromHalMetadata ADRC
6838 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6839 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6840 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6841 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6842 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6843 }
6844 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6845 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6846 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6847 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6848 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6849 }
6850 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6851 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6852 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6853 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6854 }
6855 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6856 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6857 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6858 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6859 }
6860 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6861 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6862 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6863 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6864 }
6865 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6866 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6867 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6868 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6869 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006870 // DevCamDebug metadata translateFromHalMetadata AWB
6871 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6872 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6873 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6874 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6875 }
6876 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6877 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6878 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6879 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6880 }
6881 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6882 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6883 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6884 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6885 }
6886 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6887 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6888 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6889 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6890 }
6891 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6892 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6893 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6894 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6895 }
6896 }
6897 // atrace_end(ATRACE_TAG_ALWAYS);
6898
Thierry Strudel3d639192016-09-09 11:52:26 -07006899 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6900 int64_t fwk_frame_number = *frame_number;
6901 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6902 }
6903
6904 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6905 int32_t fps_range[2];
6906 fps_range[0] = (int32_t)float_range->min_fps;
6907 fps_range[1] = (int32_t)float_range->max_fps;
6908 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6909 fps_range, 2);
6910 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6911 fps_range[0], fps_range[1]);
6912 }
6913
6914 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6915 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6916 }
6917
6918 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
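        // lookupFwkName() returns NAME_NOT_FOUND when the HAL value has no framework
        // counterpart; keep the full int result so the check below is not defeated by
        // truncation.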
        int val = lookupFwkName(SCENE_MODES_MAP,
                METADATA_MAP_SIZE(SCENE_MODES_MAP),
                *sceneMode);
6922 if (NAME_NOT_FOUND != val) {
6923 uint8_t fwkSceneMode = (uint8_t)val;
6924 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6925 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6926 fwkSceneMode);
6927 }
6928 }
6929
6930 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6931 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6932 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6933 }
6934
6935 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6936 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6937 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6938 }
6939
6940 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6941 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6942 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6943 }
6944
6945 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6946 CAM_INTF_META_EDGE_MODE, metadata) {
6947 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6948 }
6949
6950 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6951 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6952 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6953 }
6954
6955 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6956 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6957 }
6958
6959 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6960 if (0 <= *flashState) {
6961 uint8_t fwk_flashState = (uint8_t) *flashState;
6962 if (!gCamCapability[mCameraId]->flash_available) {
6963 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6964 }
6965 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6966 }
6967 }
6968
6969 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6970 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6971 if (NAME_NOT_FOUND != val) {
6972 uint8_t fwk_flashMode = (uint8_t)val;
6973 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6974 }
6975 }
6976
6977 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6978 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6979 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6980 }
6981
6982 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6983 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6984 }
6985
6986 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6987 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6988 }
6989
6990 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6991 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6992 }
6993
6994 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6995 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6996 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6997 }
6998
6999 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7000 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7001 LOGD("fwk_videoStab = %d", fwk_videoStab);
7002 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7003 } else {
        // Regardless of whether video stabilization is supported, CTS expects the EIS result
        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7006 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7007 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007008 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007009 }
7010
7011 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7012 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7013 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7014 }
7015
7016 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7017 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7018 }
7019
Thierry Strudel3d639192016-09-09 11:52:26 -07007020 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7021 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007022 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007023
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007024 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7025 gCamCapability[mCameraId]->color_arrangement);
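        // adjustBlackLevelForCFA() reorders the four applied black level values from the
        // sensor's CFA order into the fixed RGGB order used by the vendor tag below.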
Thierry Strudel3d639192016-09-09 11:52:26 -07007026
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007027 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007028 blackLevelAppliedPattern->cam_black_level[0],
7029 blackLevelAppliedPattern->cam_black_level[1],
7030 blackLevelAppliedPattern->cam_black_level[2],
7031 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007032 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7033 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007034
7035#ifndef USE_HAL_3_3
7036 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Convert from the internal 14-bit depth to the sensor's 10-bit raw depth space.
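        // For example, an applied black level of 1024 in the 14-bit domain becomes
        // 1024 / 16 = 64 in the 10-bit domain.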
Jason Lee4f3d96e2017-02-28 19:24:14 +05307039 fwk_blackLevelInd[0] /= 16.0;
7040 fwk_blackLevelInd[1] /= 16.0;
7041 fwk_blackLevelInd[2] /= 16.0;
7042 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007043 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7044 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007045#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007046 }
7047
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007048#ifndef USE_HAL_3_3
7049 // Fixed whitelevel is used by ISP/Sensor
7050 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7051 &gCamCapability[mCameraId]->white_level, 1);
7052#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007053
7054 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7055 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7056 int32_t scalerCropRegion[4];
7057 scalerCropRegion[0] = hScalerCropRegion->left;
7058 scalerCropRegion[1] = hScalerCropRegion->top;
7059 scalerCropRegion[2] = hScalerCropRegion->width;
7060 scalerCropRegion[3] = hScalerCropRegion->height;
7061
7062 // Adjust crop region from sensor output coordinate system to active
7063 // array coordinate system.
7064 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7065 scalerCropRegion[2], scalerCropRegion[3]);
7066
7067 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7068 }
7069
7070 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7071 LOGD("sensorExpTime = %lld", *sensorExpTime);
7072 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7073 }
7074
    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7079 }
7080
7081 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7082 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7083 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7084 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7085 sensorRollingShutterSkew, 1);
7086 }
7087
7088 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7089 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7090 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7091
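        // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) coefficient pair per color
        // channel; the framework interprets them as a linear noise model where the noise
        // variance at signal level x is approximately S * x + O.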
7092 //calculate the noise profile based on sensitivity
7093 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7094 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7095 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7096 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7097 noise_profile[i] = noise_profile_S;
7098 noise_profile[i+1] = noise_profile_O;
7099 }
7100 LOGD("noise model entry (S, O) is (%f, %f)",
7101 noise_profile_S, noise_profile_O);
7102 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7103 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7104 }
7105
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007106#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007107 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007108 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007109 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007110 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007111 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7112 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7113 }
7114 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007115#endif
7116
Thierry Strudel3d639192016-09-09 11:52:26 -07007117 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7118 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7119 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7120 }
7121
7122 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7123 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7124 *faceDetectMode);
7125 if (NAME_NOT_FOUND != val) {
7126 uint8_t fwk_faceDetectMode = (uint8_t)val;
7127 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7128
7129 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7130 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7131 CAM_INTF_META_FACE_DETECTION, metadata) {
7132 uint8_t numFaces = MIN(
7133 faceDetectionInfo->num_faces_detected, MAX_ROI);
7134 int32_t faceIds[MAX_ROI];
7135 uint8_t faceScores[MAX_ROI];
7136 int32_t faceRectangles[MAX_ROI * 4];
7137 int32_t faceLandmarks[MAX_ROI * 6];
7138 size_t j = 0, k = 0;
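                    // j indexes faceRectangles (4 values per face); k indexes faceLandmarks
                    // (TOTAL_LANDMARK_INDICES values per face).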
7139
7140 for (size_t i = 0; i < numFaces; i++) {
7141 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7142 // Adjust crop region from sensor output coordinate system to active
7143 // array coordinate system.
7144 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7145 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7146 rect.width, rect.height);
7147
7148 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7149 faceRectangles+j, -1);
7150
Jason Lee8ce36fa2017-04-19 19:40:37 -07007151 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7152 "bottom-right (%d, %d)",
7153 faceDetectionInfo->frame_id, i,
7154 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7155 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7156
Thierry Strudel3d639192016-09-09 11:52:26 -07007157 j+= 4;
7158 }
7159 if (numFaces <= 0) {
7160 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7161 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7162 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7163 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7164 }
7165
7166 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7167 numFaces);
7168 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7169 faceRectangles, numFaces * 4U);
7170 if (fwk_faceDetectMode ==
7171 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7172 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7173 CAM_INTF_META_FACE_LANDMARK, metadata) {
7174
7175 for (size_t i = 0; i < numFaces; i++) {
7176 // Map the co-ordinate sensor output coordinate system to active
7177 // array coordinate system.
7178 mCropRegionMapper.toActiveArray(
7179 landmarks->face_landmarks[i].left_eye_center.x,
7180 landmarks->face_landmarks[i].left_eye_center.y);
7181 mCropRegionMapper.toActiveArray(
7182 landmarks->face_landmarks[i].right_eye_center.x,
7183 landmarks->face_landmarks[i].right_eye_center.y);
7184 mCropRegionMapper.toActiveArray(
7185 landmarks->face_landmarks[i].mouth_center.x,
7186 landmarks->face_landmarks[i].mouth_center.y);
7187
7188 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007189
7190 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7191 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7192 faceDetectionInfo->frame_id, i,
7193 faceLandmarks[k + LEFT_EYE_X],
7194 faceLandmarks[k + LEFT_EYE_Y],
7195 faceLandmarks[k + RIGHT_EYE_X],
7196 faceLandmarks[k + RIGHT_EYE_Y],
7197 faceLandmarks[k + MOUTH_X],
7198 faceLandmarks[k + MOUTH_Y]);
7199
Thierry Strudel04e026f2016-10-10 11:27:36 -07007200 k+= TOTAL_LANDMARK_INDICES;
7201 }
7202 } else {
7203 for (size_t i = 0; i < numFaces; i++) {
7204 setInvalidLandmarks(faceLandmarks+k);
7205 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007206 }
7207 }
7208
Jason Lee49619db2017-04-13 12:07:22 -07007209 for (size_t i = 0; i < numFaces; i++) {
7210 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7211
7212 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7213 faceDetectionInfo->frame_id, i, faceIds[i]);
7214 }
7215
Thierry Strudel3d639192016-09-09 11:52:26 -07007216 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7217 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7218 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007219 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007220 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7221 CAM_INTF_META_FACE_BLINK, metadata) {
7222 uint8_t detected[MAX_ROI];
7223 uint8_t degree[MAX_ROI * 2];
7224 for (size_t i = 0; i < numFaces; i++) {
7225 detected[i] = blinks->blink[i].blink_detected;
7226 degree[2 * i] = blinks->blink[i].left_blink;
7227 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007228
Jason Lee49619db2017-04-13 12:07:22 -07007229 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7230 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7231 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7232 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007233 }
7234 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7235 detected, numFaces);
7236 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7237 degree, numFaces * 2);
7238 }
7239 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7240 CAM_INTF_META_FACE_SMILE, metadata) {
7241 uint8_t degree[MAX_ROI];
7242 uint8_t confidence[MAX_ROI];
7243 for (size_t i = 0; i < numFaces; i++) {
7244 degree[i] = smiles->smile[i].smile_degree;
7245 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007246
Jason Lee49619db2017-04-13 12:07:22 -07007247 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7248 "smile_degree=%d, smile_score=%d",
7249 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007250 }
7251 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7252 degree, numFaces);
7253 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7254 confidence, numFaces);
7255 }
7256 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7257 CAM_INTF_META_FACE_GAZE, metadata) {
7258 int8_t angle[MAX_ROI];
7259 int32_t direction[MAX_ROI * 3];
7260 int8_t degree[MAX_ROI * 2];
7261 for (size_t i = 0; i < numFaces; i++) {
7262 angle[i] = gazes->gaze[i].gaze_angle;
7263 direction[3 * i] = gazes->gaze[i].updown_dir;
7264 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7265 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7266 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7267 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007268
7269 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7270 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7271 "left_right_gaze=%d, top_bottom_gaze=%d",
7272 faceDetectionInfo->frame_id, i, angle[i],
7273 direction[3 * i], direction[3 * i + 1],
7274 direction[3 * i + 2],
7275 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007276 }
7277 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7278 (uint8_t *)angle, numFaces);
7279 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7280 direction, numFaces * 3);
7281 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7282 (uint8_t *)degree, numFaces * 2);
7283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007284 }
7285 }
7286 }
7287 }
7288
7289 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7290 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007291 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007292 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007293 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007294
Shuzhen Wang14415f52016-11-16 18:26:18 -08007295 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7296 histogramBins = *histBins;
7297 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7298 }
7299
7300 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007301 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7302 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007303 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007304
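                // Pick the histogram buffer reported by the stats module. For Bayer stats
                // the GR, GB and B channels map directly; Y, ALL, R and any unknown channel
                // fall back to the R channel buffer.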
7305 switch (stats_data->type) {
7306 case CAM_HISTOGRAM_TYPE_BAYER:
7307 switch (stats_data->bayer_stats.data_type) {
7308 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007309 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7310 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007311 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007312 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7313 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007314 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007315 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7316 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007317 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007318 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007319 case CAM_STATS_CHANNEL_R:
7320 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007321 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7322 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323 }
7324 break;
7325 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007326 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 break;
7328 }
7329
Shuzhen Wang14415f52016-11-16 18:26:18 -08007330 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007331 }
7332 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007333 }
7334
7335 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7336 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7337 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7338 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7339 }
7340
7341 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7342 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7343 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7344 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7345 }
7346
7347 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7348 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7349 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7350 CAM_MAX_SHADING_MAP_HEIGHT);
7351 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7352 CAM_MAX_SHADING_MAP_WIDTH);
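        // The lens shading map carries four gain values per grid cell, hence the
        // 4U * map_width * map_height entry count below.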
7353 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7354 lensShadingMap->lens_shading, 4U * map_width * map_height);
7355 }
7356
7357 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7358 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7359 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7360 }
7361
7362 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7363 //Populate CAM_INTF_META_TONEMAP_CURVES
7364 /* ch0 = G, ch 1 = B, ch 2 = R*/
7365 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7366 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7367 tonemap->tonemap_points_cnt,
7368 CAM_MAX_TONEMAP_CURVE_SIZE);
7369 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7370 }
7371
7372 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7373 &tonemap->curves[0].tonemap_points[0][0],
7374 tonemap->tonemap_points_cnt * 2);
7375
7376 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7377 &tonemap->curves[1].tonemap_points[0][0],
7378 tonemap->tonemap_points_cnt * 2);
7379
7380 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7381 &tonemap->curves[2].tonemap_points[0][0],
7382 tonemap->tonemap_points_cnt * 2);
7383 }
7384
7385 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7386 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7387 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7388 CC_GAIN_MAX);
7389 }
7390
7391 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7392 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7393 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7394 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7395 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7396 }
7397
7398 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7399 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7400 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7401 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7402 toneCurve->tonemap_points_cnt,
7403 CAM_MAX_TONEMAP_CURVE_SIZE);
7404 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7405 }
7406 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7407 (float*)toneCurve->curve.tonemap_points,
7408 toneCurve->tonemap_points_cnt * 2);
7409 }
7410
7411 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7412 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7413 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7414 predColorCorrectionGains->gains, 4);
7415 }
7416
7417 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7418 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7419 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7420 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7421 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7422 }
7423
7424 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7425 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7426 }
7427
7428 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7429 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7430 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7431 }
7432
7433 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7434 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7435 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7436 }
7437
7438 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7439 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7440 *effectMode);
7441 if (NAME_NOT_FOUND != val) {
7442 uint8_t fwk_effectMode = (uint8_t)val;
7443 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7444 }
7445 }
7446
7447 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7448 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7449 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7450 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7451 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7452 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7453 }
7454 int32_t fwk_testPatternData[4];
7455 fwk_testPatternData[0] = testPatternData->r;
7456 fwk_testPatternData[3] = testPatternData->b;
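        // Map the HAL's Gr/Gb test pattern values into the framework's channel order
        // according to the sensor's color filter arrangement.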
7457 switch (gCamCapability[mCameraId]->color_arrangement) {
7458 case CAM_FILTER_ARRANGEMENT_RGGB:
7459 case CAM_FILTER_ARRANGEMENT_GRBG:
7460 fwk_testPatternData[1] = testPatternData->gr;
7461 fwk_testPatternData[2] = testPatternData->gb;
7462 break;
7463 case CAM_FILTER_ARRANGEMENT_GBRG:
7464 case CAM_FILTER_ARRANGEMENT_BGGR:
7465 fwk_testPatternData[2] = testPatternData->gr;
7466 fwk_testPatternData[1] = testPatternData->gb;
7467 break;
7468 default:
7469 LOGE("color arrangement %d is not supported",
7470 gCamCapability[mCameraId]->color_arrangement);
7471 break;
7472 }
7473 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7474 }
7475
7476 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7477 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7478 }
7479
7480 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7481 String8 str((const char *)gps_methods);
7482 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7483 }
7484
7485 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7486 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7487 }
7488
7489 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7490 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7491 }
7492
7493 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7494 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7495 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7496 }
7497
7498 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7499 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7500 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7501 }
7502
7503 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7504 int32_t fwk_thumb_size[2];
7505 fwk_thumb_size[0] = thumb_size->width;
7506 fwk_thumb_size[1] = thumb_size->height;
7507 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7508 }
7509
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007510 // Skip reprocess metadata if there is no input stream.
7511 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7512 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7513 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7514 privateData,
7515 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007517 }
7518
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007519 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007520 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007521 meteringMode, 1);
7522 }
7523
Thierry Strudel54dc9782017-02-15 12:12:10 -08007524 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7525 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7526 LOGD("hdr_scene_data: %d %f\n",
7527 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7528 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7529 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7530 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7531 &isHdr, 1);
7532 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7533 &isHdrConfidence, 1);
7534 }
7535
Thierry Strudel3d639192016-09-09 11:52:26 -07007538 if (metadata->is_tuning_params_valid) {
7539 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7540 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7541 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7542
7543
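        // Tuning blob layout: six uint32 header fields (data version followed by the
        // sensor, VFE, CPP, CAC and mod3 payload sizes), then the sensor, VFE, CPP and
        // CAC payloads, each clamped to its respective maximum size.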
7544 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7545 sizeof(uint32_t));
7546 data += sizeof(uint32_t);
7547
7548 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7549 sizeof(uint32_t));
7550 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7551 data += sizeof(uint32_t);
7552
7553 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7554 sizeof(uint32_t));
7555 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7556 data += sizeof(uint32_t);
7557
7558 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7559 sizeof(uint32_t));
7560 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7561 data += sizeof(uint32_t);
7562
7563 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7564 sizeof(uint32_t));
7565 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7566 data += sizeof(uint32_t);
7567
7568 metadata->tuning_params.tuning_mod3_data_size = 0;
7569 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7570 sizeof(uint32_t));
7571 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7572 data += sizeof(uint32_t);
7573
7574 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7575 TUNING_SENSOR_DATA_MAX);
7576 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7577 count);
7578 data += count;
7579
7580 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7581 TUNING_VFE_DATA_MAX);
7582 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7583 count);
7584 data += count;
7585
7586 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7587 TUNING_CPP_DATA_MAX);
7588 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7589 count);
7590 data += count;
7591
7592 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7593 TUNING_CAC_DATA_MAX);
7594 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7595 count);
7596 data += count;
7597
7598 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7599 (int32_t *)(void *)tuning_meta_data_blob,
7600 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7601 }
7602
7603 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7604 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7605 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7606 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7607 NEUTRAL_COL_POINTS);
7608 }
7609
7610 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7611 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7612 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7613 }
7614
7615 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7616 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7617 // Adjust crop region from sensor output coordinate system to active
7618 // array coordinate system.
7619 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7620 hAeRegions->rect.width, hAeRegions->rect.height);
7621
7622 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7623 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7624 REGIONS_TUPLE_COUNT);
7625 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7626 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7627 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7628 hAeRegions->rect.height);
7629 }
7630
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007631 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7632 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7633 if (NAME_NOT_FOUND != val) {
7634 uint8_t fwkAfMode = (uint8_t)val;
7635 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7636 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7637 } else {
7638 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7639 val);
7640 }
7641 }
7642
Thierry Strudel3d639192016-09-09 11:52:26 -07007643 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7644 uint8_t fwk_afState = (uint8_t) *afState;
7645 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007646 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007647 }
7648
7649 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7650 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7651 }
7652
7653 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7654 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7655 }
7656
7657 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7658 uint8_t fwk_lensState = *lensState;
7659 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7660 }
7661
Thierry Strudel3d639192016-09-09 11:52:26 -07007662
7663 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007664 uint32_t ab_mode = *hal_ab_mode;
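        // Collapse the HAL's 50Hz/60Hz auto-detect variants to the generic AUTO mode
        // before the framework lookup.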
7665 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7666 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7667 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007669 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007670 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007671 if (NAME_NOT_FOUND != val) {
7672 uint8_t fwk_ab_mode = (uint8_t)val;
7673 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7674 }
7675 }
7676
7677 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7678 int val = lookupFwkName(SCENE_MODES_MAP,
7679 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7680 if (NAME_NOT_FOUND != val) {
7681 uint8_t fwkBestshotMode = (uint8_t)val;
7682 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7683 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7684 } else {
7685 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7686 }
7687 }
7688
7689 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7690 uint8_t fwk_mode = (uint8_t) *mode;
7691 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7692 }
7693
    /* Constant metadata values to be updated */
7695 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7696 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7697
7698 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7699 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7700
7701 int32_t hotPixelMap[2];
7702 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
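    // An empty hot pixel map (count 0) is reported along with hot pixel map mode OFF.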
7703
7704 // CDS
7705 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7706 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7707 }
7708
Thierry Strudel04e026f2016-10-10 11:27:36 -07007709 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7710 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007711 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007712 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7713 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7714 } else {
7715 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7716 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007717
7718 if(fwk_hdr != curr_hdr_state) {
7719 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7720 if(fwk_hdr)
7721 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7722 else
7723 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7724 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007725 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7726 }
7727
Thierry Strudel54dc9782017-02-15 12:12:10 -08007728 //binning correction
7729 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7730 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7731 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7732 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7733 }
7734
Thierry Strudel04e026f2016-10-10 11:27:36 -07007735 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007736 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007737 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7738 int8_t is_ir_on = 0;
7739
7740 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7741 if(is_ir_on != curr_ir_state) {
7742 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7743 if(is_ir_on)
7744 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7745 else
7746 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7747 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007748 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007749 }
7750
Thierry Strudel269c81a2016-10-12 12:13:59 -07007751 // AEC SPEED
7752 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7753 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7754 }
7755
7756 // AWB SPEED
7757 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7758 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7759 }
7760
Thierry Strudel3d639192016-09-09 11:52:26 -07007761 // TNR
7762 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7763 uint8_t tnr_enable = tnr->denoise_enable;
7764 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007765 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7766 int8_t is_tnr_on = 0;
7767
7768 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7769 if(is_tnr_on != curr_tnr_state) {
7770 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7771 if(is_tnr_on)
7772 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7773 else
7774 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7775 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007776
7777 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7778 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7779 }
7780
7781 // Reprocess crop data
7782 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7783 uint8_t cnt = crop_data->num_of_streams;
7784 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7785 // mm-qcamera-daemon only posts crop_data for streams
7786             // not linked to pproc, so the absence of valid crop metadata
7787             // is not necessarily an error case.
7788 LOGD("No valid crop metadata entries");
7789 } else {
7790 uint32_t reproc_stream_id;
7791 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7792 LOGD("No reprocessible stream found, ignore crop data");
7793 } else {
7794 int rc = NO_ERROR;
7795 Vector<int32_t> roi_map;
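                // Each matching stream contributes four values:
                // {left, top, width, height}.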
7796 int32_t *crop = new int32_t[cnt*4];
7797 if (NULL == crop) {
7798 rc = NO_MEMORY;
7799 }
7800 if (NO_ERROR == rc) {
7801 int32_t streams_found = 0;
7802 for (size_t i = 0; i < cnt; i++) {
7803 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7804 if (pprocDone) {
7805 // HAL already does internal reprocessing,
7806 // either via reprocessing before JPEG encoding,
7807 // or offline postprocessing for pproc bypass case.
7808 crop[0] = 0;
7809 crop[1] = 0;
7810 crop[2] = mInputStreamInfo.dim.width;
7811 crop[3] = mInputStreamInfo.dim.height;
7812 } else {
7813 crop[0] = crop_data->crop_info[i].crop.left;
7814 crop[1] = crop_data->crop_info[i].crop.top;
7815 crop[2] = crop_data->crop_info[i].crop.width;
7816 crop[3] = crop_data->crop_info[i].crop.height;
7817 }
7818 roi_map.add(crop_data->crop_info[i].roi_map.left);
7819 roi_map.add(crop_data->crop_info[i].roi_map.top);
7820 roi_map.add(crop_data->crop_info[i].roi_map.width);
7821 roi_map.add(crop_data->crop_info[i].roi_map.height);
7822 streams_found++;
7823 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7824 crop[0], crop[1], crop[2], crop[3]);
7825 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7826 crop_data->crop_info[i].roi_map.left,
7827 crop_data->crop_info[i].roi_map.top,
7828 crop_data->crop_info[i].roi_map.width,
7829 crop_data->crop_info[i].roi_map.height);
7830 break;
7831
7832 }
7833 }
7834 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7835 &streams_found, 1);
7836 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7837 crop, (size_t)(streams_found * 4));
7838 if (roi_map.array()) {
7839 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7840 roi_map.array(), roi_map.size());
7841 }
7842 }
7843 if (crop) {
7844 delete [] crop;
7845 }
7846 }
7847 }
7848 }
7849
7850 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7851         // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7852         // so hardcode the CAC result to OFF mode.
7853 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7854 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7855 } else {
7856 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7857 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7858 *cacMode);
7859 if (NAME_NOT_FOUND != val) {
7860 uint8_t resultCacMode = (uint8_t)val;
7861                 // Check whether the CAC result from the callback matches the framework-set CAC mode.
7862                 // If not, report the CAC mode that came in the corresponding request.
7863 if (fwk_cacMode != resultCacMode) {
7864 resultCacMode = fwk_cacMode;
7865 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007866 //Check if CAC is disabled by property
7867 if (m_cacModeDisabled) {
7868 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7869 }
7870
Thierry Strudel3d639192016-09-09 11:52:26 -07007871 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7872 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7873 } else {
7874 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7875 }
7876 }
7877 }
7878
7879 // Post blob of cam_cds_data through vendor tag.
7880 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7881 uint8_t cnt = cdsInfo->num_of_streams;
7882 cam_cds_data_t cdsDataOverride;
7883 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7884 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7885 cdsDataOverride.num_of_streams = 1;
7886 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7887 uint32_t reproc_stream_id;
7888 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7889 LOGD("No reprocessible stream found, ignore cds data");
7890 } else {
7891 for (size_t i = 0; i < cnt; i++) {
7892 if (cdsInfo->cds_info[i].stream_id ==
7893 reproc_stream_id) {
7894 cdsDataOverride.cds_info[0].cds_enable =
7895 cdsInfo->cds_info[i].cds_enable;
7896 break;
7897 }
7898 }
7899 }
7900 } else {
7901 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7902 }
7903 camMetadata.update(QCAMERA3_CDS_INFO,
7904 (uint8_t *)&cdsDataOverride,
7905 sizeof(cam_cds_data_t));
7906 }
7907
7908 // Ldaf calibration data
7909 if (!mLdafCalibExist) {
7910 IF_META_AVAILABLE(uint32_t, ldafCalib,
7911 CAM_INTF_META_LDAF_EXIF, metadata) {
7912 mLdafCalibExist = true;
7913 mLdafCalib[0] = ldafCalib[0];
7914 mLdafCalib[1] = ldafCalib[1];
7915 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7916 ldafCalib[0], ldafCalib[1]);
7917 }
7918 }
7919
Thierry Strudel54dc9782017-02-15 12:12:10 -08007920 // EXIF debug data through vendor tag
7921 /*
7922 * Mobicat Mask can assume 3 values:
7923 * 1 refers to Mobicat data,
7924 * 2 refers to Stats Debug and Exif Debug Data
7925 * 3 refers to Mobicat and Stats Debug Data
7926 * We want to make sure that we are sending Exif debug data
7927 * only when Mobicat Mask is 2.
7928 */
7929 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7930 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7931 (uint8_t *)(void *)mExifParams.debug_params,
7932 sizeof(mm_jpeg_debug_exif_params_t));
7933 }
7934
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007935 // Reprocess and DDM debug data through vendor tag
7936 cam_reprocess_info_t repro_info;
7937 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007938 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7939 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007940 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007941 }
7942 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7943 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007944 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007945 }
7946 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7947 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007949 }
7950 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7951 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007952 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007953 }
7954 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7955 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007956 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 }
7958 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007959 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 }
7961 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7962 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007963 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007964 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007965 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7966 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7967 }
7968 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7969 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7970 }
7971 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7972 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007973
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007974 // INSTANT AEC MODE
7975 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7976 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7977 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7978 }
7979
Shuzhen Wange763e802016-03-31 10:24:29 -07007980 // AF scene change
7981 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7982 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7983 }
7984
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007985 // Enable ZSL
7986 if (enableZsl != nullptr) {
7987 uint8_t value = *enableZsl ?
7988 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7989 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7990 }
7991
Xu Han821ea9c2017-05-23 09:00:40 -07007992 // OIS Data
7993 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7994 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7995 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7996 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7997 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7998 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7999 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8000 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8001 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8002 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8003 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
8004 }
8005
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 resultMetadata = camMetadata.release();
8007 return resultMetadata;
8008}
8009
8010/*===========================================================================
8011 * FUNCTION : saveExifParams
8012 *
8013 * DESCRIPTION: cache the 3A and stats EXIF debug parameters reported in the
 *              metadata callback for later use in JPEG EXIF debug data
8014 *
8015 * PARAMETERS :
8016 * @metadata : metadata information from callback
8017 *
8018 * RETURN : none
8019 *
8020 *==========================================================================*/
8021void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8022{
8023 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8024 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8025 if (mExifParams.debug_params) {
8026 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8027 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8028 }
8029 }
8030 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8031 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8032 if (mExifParams.debug_params) {
8033 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8034 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8035 }
8036 }
8037 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8038 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8039 if (mExifParams.debug_params) {
8040 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8041 mExifParams.debug_params->af_debug_params_valid = TRUE;
8042 }
8043 }
8044 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8045 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8046 if (mExifParams.debug_params) {
8047 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8048 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8049 }
8050 }
8051 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8052 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8053 if (mExifParams.debug_params) {
8054 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8055 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8056 }
8057 }
8058 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8059 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8060 if (mExifParams.debug_params) {
8061 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8062 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8063 }
8064 }
8065 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8066 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8067 if (mExifParams.debug_params) {
8068 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8069 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8070 }
8071 }
8072 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8073 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8074 if (mExifParams.debug_params) {
8075 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8076 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8077 }
8078 }
8079}
8080
8081/*===========================================================================
8082 * FUNCTION : get3AExifParams
8083 *
8084 * DESCRIPTION:
8085 * DESCRIPTION: return the cached 3A EXIF parameters
8086 * PARAMETERS : none
8087 *
8088 *
8089 * RETURN : mm_jpeg_exif_params_t
8090 *
8091 *==========================================================================*/
8092mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8093{
8094 return mExifParams;
8095}
8096
8097/*===========================================================================
8098 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8099 *
8100 * DESCRIPTION:
8101 * DESCRIPTION: translate urgent (partial result) metadata from the backend
 *              into framework result metadata
8102 * PARAMETERS :
8103 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008104 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8105 * urgent metadata in a batch. Always true for
8106 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008107 *
8108 * RETURN : camera_metadata_t*
8109 * metadata in a format specified by fwk
8110 *==========================================================================*/
8111camera_metadata_t*
8112QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008113 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008114{
8115 CameraMetadata camMetadata;
8116 camera_metadata_t *resultMetadata;
8117
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008118 if (!lastUrgentMetadataInBatch) {
8119 /* In batch mode, use empty metadata if this is not the last in batch
8120 */
8121 resultMetadata = allocate_camera_metadata(0, 0);
8122 return resultMetadata;
8123 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008124
8125 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8126 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8127 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8128 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8129 }
8130
8131 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8132 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8133 &aecTrigger->trigger, 1);
8134 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8135 &aecTrigger->trigger_id, 1);
8136 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8137 aecTrigger->trigger);
8138 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8139 aecTrigger->trigger_id);
8140 }
8141
8142 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8143 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8144 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8145 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8146 }
8147
Thierry Strudel3d639192016-09-09 11:52:26 -07008148 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8149 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8150 &af_trigger->trigger, 1);
8151 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8152 af_trigger->trigger);
8153 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8154 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8155 af_trigger->trigger_id);
8156 }
8157
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008158 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8159 /*af regions*/
8160 int32_t afRegions[REGIONS_TUPLE_COUNT];
8161         // Adjust the AF region from the sensor output coordinate system to the
8162         // active array coordinate system.
8163 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8164 hAfRegions->rect.width, hAfRegions->rect.height);
8165
8166 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8167 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8168 REGIONS_TUPLE_COUNT);
8169 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8170 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8171 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8172 hAfRegions->rect.height);
8173 }
8174
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008175 // AF region confidence
8176 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8177 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8178 }
8179
Thierry Strudel3d639192016-09-09 11:52:26 -07008180 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8181 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8182 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8183 if (NAME_NOT_FOUND != val) {
8184 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8185 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8186 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8187 } else {
8188 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8189 }
8190 }
8191
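    // Deduce ANDROID_CONTROL_AE_MODE from the backend's redeye reduction, LED
    // flash mode and AEC mode: redeye takes precedence, then flash AUTO/ON,
    // then plain AE on/off, then external flash.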
8192 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8193 uint32_t aeMode = CAM_AE_MODE_MAX;
8194 int32_t flashMode = CAM_FLASH_MODE_MAX;
8195 int32_t redeye = -1;
8196 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8197 aeMode = *pAeMode;
8198 }
8199 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8200 flashMode = *pFlashMode;
8201 }
8202 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8203 redeye = *pRedeye;
8204 }
8205
8206 if (1 == redeye) {
8207 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8208 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8209 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8210 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8211 flashMode);
8212 if (NAME_NOT_FOUND != val) {
8213 fwk_aeMode = (uint8_t)val;
8214 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8215 } else {
8216 LOGE("Unsupported flash mode %d", flashMode);
8217 }
8218 } else if (aeMode == CAM_AE_MODE_ON) {
8219 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8220 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8221 } else if (aeMode == CAM_AE_MODE_OFF) {
8222 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8223 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008224 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8225 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8226 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008227 } else {
8228 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8229 "flashMode:%d, aeMode:%u!!!",
8230 redeye, flashMode, aeMode);
8231 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008232 if (mInstantAEC) {
8233         // Increment frame index count until a bound is reached for instant AEC.
8234 mInstantAecFrameIdxCount++;
8235 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8236 CAM_INTF_META_AEC_INFO, metadata) {
8237 LOGH("ae_params->settled = %d",ae_params->settled);
8238 // If AEC settled, or if number of frames reached bound value,
8239 // should reset instant AEC.
8240 if (ae_params->settled ||
8241 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8242 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8243 mInstantAEC = false;
8244 mResetInstantAEC = true;
8245 mInstantAecFrameIdxCount = 0;
8246 }
8247 }
8248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008249 resultMetadata = camMetadata.release();
8250 return resultMetadata;
8251}
8252
8253/*===========================================================================
8254 * FUNCTION : dumpMetadataToFile
8255 *
8256 * DESCRIPTION: Dumps tuning metadata to file system
8257 *
8258 * PARAMETERS :
8259 * @meta : tuning metadata
8260 * @dumpFrameCount : current dump frame count
8261 * @enabled : dump enabled flag
 * @type : stream type string used in the dump file name
 * @frameNumber : frame number appended to the dump file name
8262 *
8263 *==========================================================================*/
8264void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8265 uint32_t &dumpFrameCount,
8266 bool enabled,
8267 const char *type,
8268 uint32_t frameNumber)
8269{
8270 //Some sanity checks
8271 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8272 LOGE("Tuning sensor data size bigger than expected %d: %d",
8273 meta.tuning_sensor_data_size,
8274 TUNING_SENSOR_DATA_MAX);
8275 return;
8276 }
8277
8278 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8279 LOGE("Tuning VFE data size bigger than expected %d: %d",
8280 meta.tuning_vfe_data_size,
8281 TUNING_VFE_DATA_MAX);
8282 return;
8283 }
8284
8285 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8286 LOGE("Tuning CPP data size bigger than expected %d: %d",
8287 meta.tuning_cpp_data_size,
8288 TUNING_CPP_DATA_MAX);
8289 return;
8290 }
8291
8292 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8293 LOGE("Tuning CAC data size bigger than expected %d: %d",
8294 meta.tuning_cac_data_size,
8295 TUNING_CAC_DATA_MAX);
8296 return;
8297 }
8298 //
8299
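    // Dump file layout: tuning_data_version, then the five size fields
    // (sensor/VFE/CPP/CAC/mod3), then the sensor, VFE, CPP and CAC data blocks
    // read from their fixed offsets within meta.data[].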
8300 if(enabled){
8301 char timeBuf[FILENAME_MAX];
8302 char buf[FILENAME_MAX];
8303 memset(buf, 0, sizeof(buf));
8304 memset(timeBuf, 0, sizeof(timeBuf));
8305 time_t current_time;
8306 struct tm * timeinfo;
8307 time (&current_time);
8308 timeinfo = localtime (&current_time);
8309 if (timeinfo != NULL) {
8310 strftime (timeBuf, sizeof(timeBuf),
8311 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8312 }
8313 String8 filePath(timeBuf);
8314 snprintf(buf,
8315 sizeof(buf),
8316 "%dm_%s_%d.bin",
8317 dumpFrameCount,
8318 type,
8319 frameNumber);
8320 filePath.append(buf);
8321 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8322 if (file_fd >= 0) {
8323 ssize_t written_len = 0;
8324 meta.tuning_data_version = TUNING_DATA_VERSION;
8325 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8326 written_len += write(file_fd, data, sizeof(uint32_t));
8327 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8328 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8329 written_len += write(file_fd, data, sizeof(uint32_t));
8330 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8331 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8332 written_len += write(file_fd, data, sizeof(uint32_t));
8333 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8334 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8335 written_len += write(file_fd, data, sizeof(uint32_t));
8336 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8337 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8338 written_len += write(file_fd, data, sizeof(uint32_t));
8339 meta.tuning_mod3_data_size = 0;
8340 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8341 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8342 written_len += write(file_fd, data, sizeof(uint32_t));
8343 size_t total_size = meta.tuning_sensor_data_size;
8344 data = (void *)((uint8_t *)&meta.data);
8345 written_len += write(file_fd, data, total_size);
8346 total_size = meta.tuning_vfe_data_size;
8347 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8348 written_len += write(file_fd, data, total_size);
8349 total_size = meta.tuning_cpp_data_size;
8350 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8351 written_len += write(file_fd, data, total_size);
8352 total_size = meta.tuning_cac_data_size;
8353 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8354 written_len += write(file_fd, data, total_size);
8355 close(file_fd);
8356 }else {
8357 LOGE("fail to open file for metadata dumping");
8358 }
8359 }
8360}
8361
8362/*===========================================================================
8363 * FUNCTION : cleanAndSortStreamInfo
8364 *
8365 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8366 * and sort them such that the raw stream is at the end of the list.
8367 * This is a workaround for a camera daemon constraint.
8368 *
8369 * PARAMETERS : None
8370 *
8371 *==========================================================================*/
8372void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8373{
8374 List<stream_info_t *> newStreamInfo;
8375
8376 /*clean up invalid streams*/
8377 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8378 it != mStreamInfo.end();) {
8379 if(((*it)->status) == INVALID){
8380 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8381 delete channel;
8382 free(*it);
8383 it = mStreamInfo.erase(it);
8384 } else {
8385 it++;
8386 }
8387 }
8388
8389 // Move preview/video/callback/snapshot streams into newList
8390 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8391 it != mStreamInfo.end();) {
8392 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8393 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8394 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8395 newStreamInfo.push_back(*it);
8396 it = mStreamInfo.erase(it);
8397 } else
8398 it++;
8399 }
8400 // Move raw streams into newList
8401 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8402 it != mStreamInfo.end();) {
8403 newStreamInfo.push_back(*it);
8404 it = mStreamInfo.erase(it);
8405 }
8406
8407 mStreamInfo = newStreamInfo;
8408}
8409
8410/*===========================================================================
8411 * FUNCTION : extractJpegMetadata
8412 *
8413 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8414 * JPEG metadata is cached in HAL, and return as part of capture
8415 * JPEG metadata is cached in HAL, and returned as part of the capture
8416 *
8417 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8418 * @request: capture request
8419 *
8420 *==========================================================================*/
8421void QCamera3HardwareInterface::extractJpegMetadata(
8422 CameraMetadata& jpegMetadata,
8423 const camera3_capture_request_t *request)
8424{
8425 CameraMetadata frame_settings;
8426 frame_settings = request->settings;
8427
8428 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8429 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8430 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8431 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8432
8433 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8434 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8435 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8436 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8437
8438 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8439 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8440 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8441 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8442
8443 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8444 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8445 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8446 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8447
8448 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8449 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8450 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8451 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8452
8453 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8454 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8455 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8456 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8457
8458 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8459 int32_t thumbnail_size[2];
8460 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8461 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8462 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8463 int32_t orientation =
8464 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008465 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008466 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8467 int32_t temp;
8468 temp = thumbnail_size[0];
8469 thumbnail_size[0] = thumbnail_size[1];
8470 thumbnail_size[1] = temp;
8471 }
8472 }
8473 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8474 thumbnail_size,
8475 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8476 }
8477
8478}
8479
8480/*===========================================================================
8481 * FUNCTION : convertToRegions
8482 *
8483 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8484 *
8485 * PARAMETERS :
8486 * @rect : cam_rect_t struct to convert
8487 * @region : int32_t destination array
8488 * @weight : if we are converting from cam_area_t, weight is valid
8489 * else weight = -1
8490 *
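 *              e.g. (illustrative) rect = {left:100, top:200, width:300, height:400}
 *              with weight = 1 yields region[] = {100, 200, 400, 600, 1}, assuming
 *              the FACE_* indices are laid out as {left, top, right, bottom, weight}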
8491 *==========================================================================*/
8492void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8493 int32_t *region, int weight)
8494{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008495 region[FACE_LEFT] = rect.left;
8496 region[FACE_TOP] = rect.top;
8497 region[FACE_RIGHT] = rect.left + rect.width;
8498 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008499 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008500 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008501 }
8502}
8503
8504/*===========================================================================
8505 * FUNCTION : convertFromRegions
8506 *
8507 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
8508 *
8509 * PARAMETERS :
8510 * @roi : cam_area_t destination struct
8511 * @frame_settings : framework capture request settings
8512 * @tag : metadata tag holding the region as
8513 *        {xMin, yMin, xMax, yMax, weight}
8514 *
8515 *==========================================================================*/
8516void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008517 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008518{
Thierry Strudel3d639192016-09-09 11:52:26 -07008519 int32_t x_min = frame_settings.find(tag).data.i32[0];
8520 int32_t y_min = frame_settings.find(tag).data.i32[1];
8521 int32_t x_max = frame_settings.find(tag).data.i32[2];
8522 int32_t y_max = frame_settings.find(tag).data.i32[3];
8523 roi.weight = frame_settings.find(tag).data.i32[4];
8524 roi.rect.left = x_min;
8525 roi.rect.top = y_min;
8526 roi.rect.width = x_max - x_min;
8527 roi.rect.height = y_max - y_min;
8528}
8529
8530/*===========================================================================
8531 * FUNCTION : resetIfNeededROI
8532 *
8533 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8534 * crop region
8535 *
8536 * PARAMETERS :
8537 * @roi : cam_area_t struct to resize
8538 * @scalerCropRegion : cam_crop_region_t region to compare against
8539 *
8540 *
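 * RETURN : true if the roi is left as-is (weight == 0) or was clamped to lie
 *          within the scaler crop region
 *          false if the roi lies entirely outside the scaler crop region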
8541 *==========================================================================*/
8542bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8543 const cam_crop_region_t* scalerCropRegion)
8544{
8545 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8546 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8547 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8548 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8549
8550     /* According to the spec, weight = 0 indicates that the roi should be disabled.
8551      * Without this check, the validation below (whether the roi lies inside the
8552      * scaler crop region) would fail, the roi would not be reset, and the
8553      * algorithm would continue to use a stale roi window.
8554      */
8555 if (roi->weight == 0) {
8556 return true;
8557 }
8558
8559 if ((roi_x_max < scalerCropRegion->left) ||
8560         // right edge of roi window is left of scaler crop's left edge
8561 (roi_y_max < scalerCropRegion->top) ||
8562         // bottom edge of roi window is above scaler crop's top edge
8563 (roi->rect.left > crop_x_max) ||
8564         // left edge of roi window is beyond (to the right of) scaler crop's right edge
8565 (roi->rect.top > crop_y_max)){
8566         // top edge of roi window is below scaler crop's bottom edge
8567 return false;
8568 }
8569 if (roi->rect.left < scalerCropRegion->left) {
8570 roi->rect.left = scalerCropRegion->left;
8571 }
8572 if (roi->rect.top < scalerCropRegion->top) {
8573 roi->rect.top = scalerCropRegion->top;
8574 }
8575 if (roi_x_max > crop_x_max) {
8576 roi_x_max = crop_x_max;
8577 }
8578 if (roi_y_max > crop_y_max) {
8579 roi_y_max = crop_y_max;
8580 }
8581 roi->rect.width = roi_x_max - roi->rect.left;
8582 roi->rect.height = roi_y_max - roi->rect.top;
8583 return true;
8584}
8585
8586/*===========================================================================
8587 * FUNCTION : convertLandmarks
8588 *
8589 * DESCRIPTION: helper method to extract the landmarks from face detection info
8590 *
8591 * PARAMETERS :
8592 * @landmark_data : input landmark data to be converted
8593 * @landmarks : int32_t destination array
8594 *
8595 *
8596 *==========================================================================*/
8597void QCamera3HardwareInterface::convertLandmarks(
8598 cam_face_landmarks_info_t landmark_data,
8599 int32_t *landmarks)
8600{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008601 if (landmark_data.is_left_eye_valid) {
8602 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8603 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8604 } else {
8605 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8606 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8607 }
8608
8609 if (landmark_data.is_right_eye_valid) {
8610 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8611 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8612 } else {
8613 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8614 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8615 }
8616
8617 if (landmark_data.is_mouth_valid) {
8618 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8619 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8620 } else {
8621 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8622 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8623 }
8624}
8625
8626/*===========================================================================
8627 * FUNCTION : setInvalidLandmarks
8628 *
8629 * DESCRIPTION: helper method to set invalid landmarks
8630 *
8631 * PARAMETERS :
8632 * @landmarks : int32_t destination array
8633 *
8634 *
8635 *==========================================================================*/
8636void QCamera3HardwareInterface::setInvalidLandmarks(
8637 int32_t *landmarks)
8638{
8639 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8640 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8641 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8642 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8643 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8644 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008645}
8646
8647#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008648
8649/*===========================================================================
8650 * FUNCTION : getCapabilities
8651 *
8652 * DESCRIPTION: query camera capability from back-end
8653 *
8654 * PARAMETERS :
8655 * @ops : mm-interface ops structure
8656 * @cam_handle : camera handle for which we need capability
8657 *
8658 * RETURN : ptr type of capability structure
8659 * capability for success
8660 * NULL for failure
8661 *==========================================================================*/
8662cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8663 uint32_t cam_handle)
8664{
8665 int rc = NO_ERROR;
8666 QCamera3HeapMemory *capabilityHeap = NULL;
8667 cam_capability_t *cap_ptr = NULL;
8668
8669 if (ops == NULL) {
8670 LOGE("Invalid arguments");
8671 return NULL;
8672 }
8673
8674 capabilityHeap = new QCamera3HeapMemory(1);
8675 if (capabilityHeap == NULL) {
8676 LOGE("creation of capabilityHeap failed");
8677 return NULL;
8678 }
8679
8680 /* Allocate memory for capability buffer */
8681 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8682 if(rc != OK) {
8683         LOGE("No memory for capability");
8684 goto allocate_failed;
8685 }
8686
8687 /* Map memory for capability buffer */
8688 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8689
8690 rc = ops->map_buf(cam_handle,
8691 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8692 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8693 if(rc < 0) {
8694 LOGE("failed to map capability buffer");
8695 rc = FAILED_TRANSACTION;
8696 goto map_failed;
8697 }
8698
8699 /* Query Capability */
8700 rc = ops->query_capability(cam_handle);
8701 if(rc < 0) {
8702 LOGE("failed to query capability");
8703 rc = FAILED_TRANSACTION;
8704 goto query_failed;
8705 }
8706
8707 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8708 if (cap_ptr == NULL) {
8709 LOGE("out of memory");
8710 rc = NO_MEMORY;
8711 goto query_failed;
8712 }
8713
8714 memset(cap_ptr, 0, sizeof(cam_capability_t));
8715 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8716
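    // Clear the analysis stream padding offsets in the copied capability struct.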
8717 int index;
8718 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8719 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8720 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8721 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8722 }
8723
8724query_failed:
8725 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8726map_failed:
8727 capabilityHeap->deallocate();
8728allocate_failed:
8729 delete capabilityHeap;
8730
8731 if (rc != NO_ERROR) {
8732 return NULL;
8733 } else {
8734 return cap_ptr;
8735 }
8736}
8737
Thierry Strudel3d639192016-09-09 11:52:26 -07008738/*===========================================================================
8739 * FUNCTION : initCapabilities
8740 *
8741 * DESCRIPTION: initialize camera capabilities in static data struct
8742 *
8743 * PARAMETERS :
8744 * @cameraId : camera Id
8745 *
8746 * RETURN : int32_t type of status
8747 * NO_ERROR -- success
8748 * none-zero failure code
8749 *==========================================================================*/
8750int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8751{
8752 int rc = 0;
8753 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008754 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008755
8756 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8757 if (rc) {
8758 LOGE("camera_open failed. rc = %d", rc);
8759 goto open_failed;
8760 }
8761 if (!cameraHandle) {
8762 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8763 goto open_failed;
8764 }
8765
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008766 handle = get_main_camera_handle(cameraHandle->camera_handle);
8767 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8768 if (gCamCapability[cameraId] == NULL) {
8769 rc = FAILED_TRANSACTION;
8770 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008771 }
8772
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008773 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008774 if (is_dual_camera_by_idx(cameraId)) {
8775 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8776 gCamCapability[cameraId]->aux_cam_cap =
8777 getCapabilities(cameraHandle->ops, handle);
8778 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8779 rc = FAILED_TRANSACTION;
8780 free(gCamCapability[cameraId]);
8781 goto failed_op;
8782 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008783
8784 // Copy the main camera capability to main_cam_cap struct
8785 gCamCapability[cameraId]->main_cam_cap =
8786 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8787 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8788 LOGE("out of memory");
8789 rc = NO_MEMORY;
8790 goto failed_op;
8791 }
8792 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8793 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008794 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008795failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008796 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8797 cameraHandle = NULL;
8798open_failed:
8799 return rc;
8800}
8801
8802/*==========================================================================
8803 * FUNCTION : get3AVersion
8804 *
8805 * DESCRIPTION: get the Q3A S/W version
8806 *
8807 * PARAMETERS :
8808 * @sw_version: Reference to the Q3A structure which will hold version info upon
8809 * return
8810 *
8811 * RETURN : None
8812 *
8813 *==========================================================================*/
8814void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8815{
8816 if(gCamCapability[mCameraId])
8817 sw_version = gCamCapability[mCameraId]->q3a_version;
8818 else
8819 LOGE("Capability structure NULL!");
8820}
8821
8822
8823/*===========================================================================
8824 * FUNCTION : initParameters
8825 *
8826 * DESCRIPTION: initialize camera parameters
8827 *
8828 * PARAMETERS :
8829 *
8830 * RETURN : int32_t type of status
8831 * NO_ERROR -- success
8832 * none-zero failure code
8833 *==========================================================================*/
8834int QCamera3HardwareInterface::initParameters()
8835{
8836 int rc = 0;
8837
8838 //Allocate Set Param Buffer
8839 mParamHeap = new QCamera3HeapMemory(1);
8840 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8841 if(rc != OK) {
8842 rc = NO_MEMORY;
8843 LOGE("Failed to allocate SETPARM Heap memory");
8844 delete mParamHeap;
8845 mParamHeap = NULL;
8846 return rc;
8847 }
8848
8849 //Map memory for parameters buffer
8850 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8851 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8852 mParamHeap->getFd(0),
8853 sizeof(metadata_buffer_t),
8854 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8855 if(rc < 0) {
8856 LOGE("failed to map SETPARM buffer");
8857 rc = FAILED_TRANSACTION;
8858 mParamHeap->deallocate();
8859 delete mParamHeap;
8860 mParamHeap = NULL;
8861 return rc;
8862 }
8863
8864 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8865
8866 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8867 return rc;
8868}
8869
8870/*===========================================================================
8871 * FUNCTION : deinitParameters
8872 *
8873 * DESCRIPTION: de-initialize camera parameters
8874 *
8875 * PARAMETERS :
8876 *
8877 * RETURN : NONE
8878 *==========================================================================*/
8879void QCamera3HardwareInterface::deinitParameters()
8880{
8881 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8882 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8883
8884 mParamHeap->deallocate();
8885 delete mParamHeap;
8886 mParamHeap = NULL;
8887
8888 mParameters = NULL;
8889
8890 free(mPrevParameters);
8891 mPrevParameters = NULL;
8892}
8893
8894/*===========================================================================
8895 * FUNCTION : calcMaxJpegSize
8896 *
8897 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8898 *
8899 * PARAMETERS :
8900 * @camera_id : camera Id
 *
8901 * RETURN : max_jpeg_size
8902 *==========================================================================*/
8903size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8904{
8905 size_t max_jpeg_size = 0;
8906 size_t temp_width, temp_height;
8907 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8908 MAX_SIZES_CNT);
8909 for (size_t i = 0; i < count; i++) {
8910 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8911 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8912 if (temp_width * temp_height > max_jpeg_size ) {
8913 max_jpeg_size = temp_width * temp_height;
8914 }
8915 }
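    // Worst-case JPEG buffer: scale the largest pixel count by 3/2 (roughly the
    // equivalent YUV 4:2:0 frame size) and leave room for the camera3_jpeg_blob_t
    // transport header appended at the end of the buffer.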
8916 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8917 return max_jpeg_size;
8918}
8919
8920/*===========================================================================
8921 * FUNCTION : getMaxRawSize
8922 *
8923 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8924 *
8925 * PARAMETERS :
8926 * @camera_id : camera Id
 *
8927 * RETURN : Largest supported Raw Dimension
8928 *==========================================================================*/
8929cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8930{
8931 int max_width = 0;
8932 cam_dimension_t maxRawSize;
8933
8934 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8935 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8936 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8937 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8938 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8939 }
8940 }
8941 return maxRawSize;
8942}
8943
8944
8945/*===========================================================================
8946 * FUNCTION : calcMaxJpegDim
8947 *
8948 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8949 *
8950 * PARAMETERS :
8951 *
8952 * RETURN : max_jpeg_dim
8953 *==========================================================================*/
8954cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8955{
8956 cam_dimension_t max_jpeg_dim;
8957 cam_dimension_t curr_jpeg_dim;
8958 max_jpeg_dim.width = 0;
8959 max_jpeg_dim.height = 0;
8960 curr_jpeg_dim.width = 0;
8961 curr_jpeg_dim.height = 0;
8962 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8963 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8964 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8965 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8966 max_jpeg_dim.width * max_jpeg_dim.height ) {
8967 max_jpeg_dim.width = curr_jpeg_dim.width;
8968 max_jpeg_dim.height = curr_jpeg_dim.height;
8969 }
8970 }
8971 return max_jpeg_dim;
8972}
8973
8974/*===========================================================================
8975 * FUNCTION : addStreamConfig
8976 *
8977 * DESCRIPTION: adds the stream configuration to the array
8978 *
8979 * PARAMETERS :
8980 * @available_stream_configs : pointer to stream configuration array
8981 * @scalar_format : scalar format
8982 * @dim : configuration dimension
8983 * @config_type : input or output configuration type
8984 *
8985 * RETURN : NONE
8986 *==========================================================================*/
8987void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8988 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8989{
8990 available_stream_configs.add(scalar_format);
8991 available_stream_configs.add(dim.width);
8992 available_stream_configs.add(dim.height);
8993 available_stream_configs.add(config_type);
8994}
8995
8996/*===========================================================================
8997 * FUNCTION : supportBurstCapture
8998 *
8999 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9000 *
9001 * PARAMETERS :
9002 * @cameraId : camera Id
9003 *
9004 * RETURN : true if camera supports BURST_CAPTURE
9005 * false otherwise
9006 *==========================================================================*/
9007bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9008{
9009 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9010 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9011 const int32_t highResWidth = 3264;
9012 const int32_t highResHeight = 2448;
9013
9014 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9015 // Maximum resolution images cannot be captured at >= 10fps
9016 // -> not supporting BURST_CAPTURE
9017 return false;
9018 }
9019
9020 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9021 // Maximum resolution images can be captured at >= 20fps
9022 // --> supporting BURST_CAPTURE
9023 return true;
9024 }
9025
9026 // Find the smallest highRes resolution, or largest resolution if there is none
9027 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9028 MAX_SIZES_CNT);
9029 size_t highRes = 0;
9030 while ((highRes + 1 < totalCnt) &&
9031 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9032 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9033 highResWidth * highResHeight)) {
9034 highRes++;
9035 }
9036 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9037 return true;
9038 } else {
9039 return false;
9040 }
9041}
9042
9043/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009044 * FUNCTION : getPDStatIndex
9045 *
9046 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9047 *
9048 * PARAMETERS :
9049 * @caps : camera capabilities
9050 *
9051 * RETURN : int32_t type
9052 * non-negative - on success
9053 * -1 - on failure
9054 *==========================================================================*/
9055int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9056 if (nullptr == caps) {
9057 return -1;
9058 }
9059
9060 uint32_t metaRawCount = caps->meta_raw_channel_count;
9061 int32_t ret = -1;
9062 for (size_t i = 0; i < metaRawCount; i++) {
9063 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9064 ret = i;
9065 break;
9066 }
9067 }
9068
9069 return ret;
9070}
9071
9072/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 * FUNCTION : initStaticMetadata
9074 *
9075 * DESCRIPTION: initialize the static metadata
9076 *
9077 * PARAMETERS :
9078 * @cameraId : camera Id
9079 *
9080 * RETURN : int32_t type of status
9081 * 0 -- success
9082 * non-zero failure code
9083 *==========================================================================*/
9084int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9085{
9086 int rc = 0;
9087 CameraMetadata staticInfo;
9088 size_t count = 0;
9089 bool limitedDevice = false;
9090 char prop[PROPERTY_VALUE_MAX];
9091 bool supportBurst = false;
9092
9093 supportBurst = supportBurstCapture(cameraId);
9094
9095     /* If the sensor is a YUV or mono sensor (no raw support), or if per-frame
9096      * control is not guaranteed, or if the min fps at max resolution is less
9097      * than 20 fps, the device is advertised as LIMITED. */
9098 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9099 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9100 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9101 !supportBurst;
9102
9103 uint8_t supportedHwLvl = limitedDevice ?
9104 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009105#ifndef USE_HAL_3_3
9106 // LEVEL_3 - This device will support level 3.
9107 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9108#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009109 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009110#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009111
9112 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9113 &supportedHwLvl, 1);
9114
9115 bool facingBack = false;
9116 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9117 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9118 facingBack = true;
9119 }
9120 /*HAL 3 only*/
9121 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9122 &gCamCapability[cameraId]->min_focus_distance, 1);
9123
9124 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9125 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9126
9127     /* Should be using the list of focal lengths, but the sensor doesn't provide that info yet */
9128 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9129 &gCamCapability[cameraId]->focal_length,
9130 1);
9131
9132 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9133 gCamCapability[cameraId]->apertures,
9134 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9135
9136 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9137 gCamCapability[cameraId]->filter_densities,
9138 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9139
9140
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009141 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9142 size_t mode_count =
9143 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9144 for (size_t i = 0; i < mode_count; i++) {
9145 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009148 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009149
9150 int32_t lens_shading_map_size[] = {
9151 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9152 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9153 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9154 lens_shading_map_size,
9155 sizeof(lens_shading_map_size)/sizeof(int32_t));
9156
9157 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9158 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9159
9160 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9161 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9162
9163 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9164 &gCamCapability[cameraId]->max_frame_duration, 1);
9165
9166 camera_metadata_rational baseGainFactor = {
9167 gCamCapability[cameraId]->base_gain_factor.numerator,
9168 gCamCapability[cameraId]->base_gain_factor.denominator};
9169 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9170 &baseGainFactor, 1);
9171
9172 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9173 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9174
9175 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9176 gCamCapability[cameraId]->pixel_array_size.height};
9177 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9178 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9179
9180 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9181 gCamCapability[cameraId]->active_array_size.top,
9182 gCamCapability[cameraId]->active_array_size.width,
9183 gCamCapability[cameraId]->active_array_size.height};
9184 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9185 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9186
9187 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9188 &gCamCapability[cameraId]->white_level, 1);
9189
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009190 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9191 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9192 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009193 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009194 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
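    // adjustBlackLevelForCFA presumably remaps the fixed per-channel black levels into the
    // 2x2 CFA raster order that ANDROID_SENSOR_BLACK_LEVEL_PATTERN expects, based on the
    // color_arrangement advertised above.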
Thierry Strudel3d639192016-09-09 11:52:26 -07009195
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009196#ifndef USE_HAL_3_3
9197 bool hasBlackRegions = false;
9198 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9199 LOGW("black_region_count: %d is bounded to %d",
9200 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9201 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9202 }
9203 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9204 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9205 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9206 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9207 }
9208 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9209 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9210 hasBlackRegions = true;
9211 }
9212#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009213 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9214 &gCamCapability[cameraId]->flash_charge_duration, 1);
9215
9216 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9217 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9218
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009219 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9220 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9221 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009222 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9223 &timestampSource, 1);
9224
Thierry Strudel54dc9782017-02-15 12:12:10 -08009225 //update histogram vendor data
9226 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009227 &gCamCapability[cameraId]->histogram_size, 1);
9228
Thierry Strudel54dc9782017-02-15 12:12:10 -08009229 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009230 &gCamCapability[cameraId]->max_histogram_count, 1);
9231
Shuzhen Wang14415f52016-11-16 18:26:18 -08009232 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9233    //so that the app can request fewer bins than the maximum supported.
9234 std::vector<int32_t> histBins;
9235 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9236 histBins.push_back(maxHistBins);
9237 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9238 (maxHistBins & 0x1) == 0) {
9239 histBins.push_back(maxHistBins >> 1);
9240 maxHistBins >>= 1;
9241 }
9242 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9243 histBins.data(), histBins.size());
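    // Illustrative example (values depend on the backend): with max_histogram_count = 256
    // and MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the loop above advertises {256, 128, 64}.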
9244
Thierry Strudel3d639192016-09-09 11:52:26 -07009245 int32_t sharpness_map_size[] = {
9246 gCamCapability[cameraId]->sharpness_map_size.width,
9247 gCamCapability[cameraId]->sharpness_map_size.height};
9248
9249 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9250 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9251
9252 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9253 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9254
Emilian Peev0f3c3162017-03-15 12:57:46 +00009255 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9256 if (0 <= indexPD) {
9257 // Advertise PD stats data as part of the Depth capabilities
9258 int32_t depthWidth =
9259 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9260 int32_t depthHeight =
9261 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9262 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9263 assert(0 < depthSamplesCount);
9264 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9265 &depthSamplesCount, 1);
9266
9267 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9268 depthHeight,
9269 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9270 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9271 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9272 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9273 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9274
9275 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9276 depthHeight, 33333333,
9277 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9278 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9279 depthMinDuration,
9280 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9281
9282 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9283 depthHeight, 0,
9284 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9285 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9286 depthStallDuration,
9287 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9288
9289 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9290 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9291 }
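    // Sketch of the sizing above (assuming 16 bytes per depth point-cloud sample): the PD
    // buffer carries width * height 16-bit values, so a hypothetical 640x480 PD map packs
    // into (640 * 480 * 2) / 16 = 38400 depth samples advertised as a BLOB configuration.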
9292
Thierry Strudel3d639192016-09-09 11:52:26 -07009293 int32_t scalar_formats[] = {
9294 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9295 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9296 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9297 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9298 HAL_PIXEL_FORMAT_RAW10,
9299 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009300 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9301 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9302 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009303
9304 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9305 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9306 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9307 count, MAX_SIZES_CNT, available_processed_sizes);
9308 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9309 available_processed_sizes, count * 2);
9310
9311 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9312 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9313 makeTable(gCamCapability[cameraId]->raw_dim,
9314 count, MAX_SIZES_CNT, available_raw_sizes);
9315 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9316 available_raw_sizes, count * 2);
9317
9318 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9319 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9320 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9321 count, MAX_SIZES_CNT, available_fps_ranges);
9322 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9323 available_fps_ranges, count * 2);
9324
9325 camera_metadata_rational exposureCompensationStep = {
9326 gCamCapability[cameraId]->exp_compensation_step.numerator,
9327 gCamCapability[cameraId]->exp_compensation_step.denominator};
9328 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9329 &exposureCompensationStep, 1);
9330
9331 Vector<uint8_t> availableVstabModes;
9332 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9333 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009334 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009335 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009336 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009338 count = IS_TYPE_MAX;
9339 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9340 for (size_t i = 0; i < count; i++) {
9341 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9342 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9343 eisSupported = true;
9344 break;
9345 }
9346 }
9347 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009348 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9349 }
9350 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9351 availableVstabModes.array(), availableVstabModes.size());
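    // In short: VIDEO_STABILIZATION_MODE_ON is advertised only for back-facing sensors when
    // persist.camera.eis.enable is set and the backend reports EIS 2.0 or 3.0 support.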
9352
9353 /*HAL 1 and HAL 3 common*/
9354 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9355 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9356 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009357 // Cap the max zoom to the max preferred value
9358 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009359 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9360 &maxZoom, 1);
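    // Illustrative example (table contents vary per module): a zoom_ratio_tbl whose last
    // entry is 800 yields 800 / 100 = 8x, which is then capped at MAX_PREFERRED_ZOOM_RATIO.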
9361
9362 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9363 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9364
9365 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9366 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9367 max3aRegions[2] = 0; /* AF not supported */
9368 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9369 max3aRegions, 3);
9370
9371 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9372 memset(prop, 0, sizeof(prop));
9373 property_get("persist.camera.facedetect", prop, "1");
9374 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9375 LOGD("Support face detection mode: %d",
9376 supportedFaceDetectMode);
9377
9378 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009379    /* supported face detect mode should be OFF if the max number of faces is 0 */
9380 if (maxFaces <= 0) {
9381 supportedFaceDetectMode = 0;
9382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 Vector<uint8_t> availableFaceDetectModes;
9384 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9385 if (supportedFaceDetectMode == 1) {
9386 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9387 } else if (supportedFaceDetectMode == 2) {
9388 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9389 } else if (supportedFaceDetectMode == 3) {
9390 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9391 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9392 } else {
9393 maxFaces = 0;
9394 }
9395 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9396 availableFaceDetectModes.array(),
9397 availableFaceDetectModes.size());
9398 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9399 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009400 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9401 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9402 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009403
9404 int32_t exposureCompensationRange[] = {
9405 gCamCapability[cameraId]->exposure_compensation_min,
9406 gCamCapability[cameraId]->exposure_compensation_max};
9407 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9408 exposureCompensationRange,
9409 sizeof(exposureCompensationRange)/sizeof(int32_t));
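    // Illustrative example (sensor dependent): a compensation range of [-12, 12] combined
    // with a 1/6 EV step advertised above corresponds to +/-2 EV of exposure compensation.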
9410
9411 uint8_t lensFacing = (facingBack) ?
9412 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9413 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9414
9415 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9416 available_thumbnail_sizes,
9417 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9418
9419 /*all sizes will be clubbed into this tag*/
9420 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9421 /*android.scaler.availableStreamConfigurations*/
9422 Vector<int32_t> available_stream_configs;
9423 cam_dimension_t active_array_dim;
9424 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9425 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009426
9427    /*advertise the list of supported input dimensions based on the property below.
9428    By default all sizes up to 5MP will be advertised.
9429    Note that the setprop resolution format should be WxH.
9430    e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9431    To list all supported sizes, the setprop needs to be set to "0x0" */
9432 cam_dimension_t minInputSize = {2592,1944}; //5MP
9433 memset(prop, 0, sizeof(prop));
9434 property_get("persist.camera.input.minsize", prop, "2592x1944");
9435 if (strlen(prop) > 0) {
9436 char *saveptr = NULL;
9437 char *token = strtok_r(prop, "x", &saveptr);
9438 if (token != NULL) {
9439 minInputSize.width = atoi(token);
9440 }
9441 token = strtok_r(NULL, "x", &saveptr);
9442 if (token != NULL) {
9443 minInputSize.height = atoi(token);
9444 }
9445 }
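    // The parsed WxH (e.g. 1280x720 from the setprop example above) becomes the threshold
    // checked against picture_sizes_tbl[0] (typically the largest size) when advertising
    // reprocess input streams below.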
9446
Thierry Strudel3d639192016-09-09 11:52:26 -07009447 /* Add input/output stream configurations for each scalar formats*/
9448 for (size_t j = 0; j < scalar_formats_count; j++) {
9449 switch (scalar_formats[j]) {
9450 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9451 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9452 case HAL_PIXEL_FORMAT_RAW10:
9453 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9454 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9455 addStreamConfig(available_stream_configs, scalar_formats[j],
9456 gCamCapability[cameraId]->raw_dim[i],
9457 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9458 }
9459 break;
9460 case HAL_PIXEL_FORMAT_BLOB:
9461 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9462 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9463 addStreamConfig(available_stream_configs, scalar_formats[j],
9464 gCamCapability[cameraId]->picture_sizes_tbl[i],
9465 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9466 }
9467 break;
9468 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9469 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9470 default:
9471 cam_dimension_t largest_picture_size;
9472 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9473 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9474 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9475 addStreamConfig(available_stream_configs, scalar_formats[j],
9476 gCamCapability[cameraId]->picture_sizes_tbl[i],
9477 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009478                /* For these two formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009479 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9480 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009481 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9482 >= minInputSize.width) || (gCamCapability[cameraId]->
9483 picture_sizes_tbl[i].height >= minInputSize.height)) {
9484 addStreamConfig(available_stream_configs, scalar_formats[j],
9485 gCamCapability[cameraId]->picture_sizes_tbl[i],
9486 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9487 }
9488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009489 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009490
Thierry Strudel3d639192016-09-09 11:52:26 -07009491 break;
9492 }
9493 }
9494
9495 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9496 available_stream_configs.array(), available_stream_configs.size());
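    // Each availableStreamConfigurations entry is a (format, width, height, direction)
    // tuple, which is what addStreamConfig is presumed to append for every size handled above.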
9497 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9498 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9499
9500 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9501 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9502
9503 /* android.scaler.availableMinFrameDurations */
9504 Vector<int64_t> available_min_durations;
9505 for (size_t j = 0; j < scalar_formats_count; j++) {
9506 switch (scalar_formats[j]) {
9507 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9508 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9509 case HAL_PIXEL_FORMAT_RAW10:
9510 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9511 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9512 available_min_durations.add(scalar_formats[j]);
9513 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9514 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9515 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9516 }
9517 break;
9518 default:
9519 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9520 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9521 available_min_durations.add(scalar_formats[j]);
9522 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9523 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9524 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9525 }
9526 break;
9527 }
9528 }
9529 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9530 available_min_durations.array(), available_min_durations.size());
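    // Layout: (format, width, height, min_frame_duration_ns) per entry, mirroring the
    // per-size raw_min_duration / picture_min_duration tables consumed above.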
9531
9532 Vector<int32_t> available_hfr_configs;
9533 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9534 int32_t fps = 0;
9535 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9536 case CAM_HFR_MODE_60FPS:
9537 fps = 60;
9538 break;
9539 case CAM_HFR_MODE_90FPS:
9540 fps = 90;
9541 break;
9542 case CAM_HFR_MODE_120FPS:
9543 fps = 120;
9544 break;
9545 case CAM_HFR_MODE_150FPS:
9546 fps = 150;
9547 break;
9548 case CAM_HFR_MODE_180FPS:
9549 fps = 180;
9550 break;
9551 case CAM_HFR_MODE_210FPS:
9552 fps = 210;
9553 break;
9554 case CAM_HFR_MODE_240FPS:
9555 fps = 240;
9556 break;
9557 case CAM_HFR_MODE_480FPS:
9558 fps = 480;
9559 break;
9560 case CAM_HFR_MODE_OFF:
9561 case CAM_HFR_MODE_MAX:
9562 default:
9563 break;
9564 }
9565
9566 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9567 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9568            /* For each HFR frame rate, we need to advertise one variable fps range
9569             * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
9570 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9571 * set by the app. When video recording is started, [120, 120] is
9572 * set. This way sensor configuration does not change when recording
9573 * is started */
9574
9575 /* (width, height, fps_min, fps_max, batch_size_max) */
9576 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9577 j < MAX_SIZES_CNT; j++) {
9578 available_hfr_configs.add(
9579 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9580 available_hfr_configs.add(
9581 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9582 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9583 available_hfr_configs.add(fps);
9584 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9585
9586 /* (width, height, fps_min, fps_max, batch_size_max) */
9587 available_hfr_configs.add(
9588 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9589 available_hfr_configs.add(
9590 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9591 available_hfr_configs.add(fps);
9592 available_hfr_configs.add(fps);
9593 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9594 }
9595 }
9596 }
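    // Illustrative expansion (assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080 entry in the
    // 120 FPS HFR table adds (1920, 1080, 30, 120, 4) followed by (1920, 1080, 120, 120, 4).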
9597 //Advertise HFR capability only if the property is set
9598 memset(prop, 0, sizeof(prop));
9599 property_get("persist.camera.hal3hfr.enable", prop, "1");
9600 uint8_t hfrEnable = (uint8_t)atoi(prop);
9601
9602 if(hfrEnable && available_hfr_configs.array()) {
9603 staticInfo.update(
9604 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9605 available_hfr_configs.array(), available_hfr_configs.size());
9606 }
9607
9608 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9609 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9610 &max_jpeg_size, 1);
9611
9612 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9613 size_t size = 0;
9614 count = CAM_EFFECT_MODE_MAX;
9615 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9616 for (size_t i = 0; i < count; i++) {
9617 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9618 gCamCapability[cameraId]->supported_effects[i]);
9619 if (NAME_NOT_FOUND != val) {
9620 avail_effects[size] = (uint8_t)val;
9621 size++;
9622 }
9623 }
9624 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9625 avail_effects,
9626 size);
9627
9628 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9629 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9630 size_t supported_scene_modes_cnt = 0;
9631 count = CAM_SCENE_MODE_MAX;
9632 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9633 for (size_t i = 0; i < count; i++) {
9634 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9635 CAM_SCENE_MODE_OFF) {
9636 int val = lookupFwkName(SCENE_MODES_MAP,
9637 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9638 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009639
Thierry Strudel3d639192016-09-09 11:52:26 -07009640 if (NAME_NOT_FOUND != val) {
9641 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9642 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9643 supported_scene_modes_cnt++;
9644 }
9645 }
9646 }
9647 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9648 avail_scene_modes,
9649 supported_scene_modes_cnt);
9650
9651 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9652 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9653 supported_scene_modes_cnt,
9654 CAM_SCENE_MODE_MAX,
9655 scene_mode_overrides,
9656 supported_indexes,
9657 cameraId);
9658
9659 if (supported_scene_modes_cnt == 0) {
9660 supported_scene_modes_cnt = 1;
9661 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9662 }
9663
9664 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9665 scene_mode_overrides, supported_scene_modes_cnt * 3);
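    // Each supported scene mode contributes an (AE, AWB, AF) override triplet, hence the
    // supported_scene_modes_cnt * 3 count passed to the overrides tag above.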
9666
9667 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9668 ANDROID_CONTROL_MODE_AUTO,
9669 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9670 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9671 available_control_modes,
9672 3);
9673
9674 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9675 size = 0;
9676 count = CAM_ANTIBANDING_MODE_MAX;
9677 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9678 for (size_t i = 0; i < count; i++) {
9679 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9680 gCamCapability[cameraId]->supported_antibandings[i]);
9681 if (NAME_NOT_FOUND != val) {
9682 avail_antibanding_modes[size] = (uint8_t)val;
9683 size++;
9684 }
9685
9686 }
9687 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9688 avail_antibanding_modes,
9689 size);
9690
9691 uint8_t avail_abberation_modes[] = {
9692 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9693 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9694 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9695 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9696 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9697 if (0 == count) {
9698        // If no aberration correction modes are available for a device, advertise only the OFF mode
9699 size = 1;
9700 } else {
9701        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9702        // so advertise all 3 modes if at least one mode is supported, as per the
9703        // new M requirement
9704 size = 3;
9705 }
9706 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9707 avail_abberation_modes,
9708 size);
9709
9710 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9711 size = 0;
9712 count = CAM_FOCUS_MODE_MAX;
9713 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9714 for (size_t i = 0; i < count; i++) {
9715 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9716 gCamCapability[cameraId]->supported_focus_modes[i]);
9717 if (NAME_NOT_FOUND != val) {
9718 avail_af_modes[size] = (uint8_t)val;
9719 size++;
9720 }
9721 }
9722 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9723 avail_af_modes,
9724 size);
9725
9726 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9727 size = 0;
9728 count = CAM_WB_MODE_MAX;
9729 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9730 for (size_t i = 0; i < count; i++) {
9731 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9732 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9733 gCamCapability[cameraId]->supported_white_balances[i]);
9734 if (NAME_NOT_FOUND != val) {
9735 avail_awb_modes[size] = (uint8_t)val;
9736 size++;
9737 }
9738 }
9739 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9740 avail_awb_modes,
9741 size);
9742
9743 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9744 count = CAM_FLASH_FIRING_LEVEL_MAX;
9745 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9746 count);
9747 for (size_t i = 0; i < count; i++) {
9748 available_flash_levels[i] =
9749 gCamCapability[cameraId]->supported_firing_levels[i];
9750 }
9751 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9752 available_flash_levels, count);
9753
9754 uint8_t flashAvailable;
9755 if (gCamCapability[cameraId]->flash_available)
9756 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9757 else
9758 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9759 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9760 &flashAvailable, 1);
9761
9762 Vector<uint8_t> avail_ae_modes;
9763 count = CAM_AE_MODE_MAX;
9764 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9765 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009766 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9767 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9768 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9769 }
9770 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009771 }
9772 if (flashAvailable) {
9773 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9774 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9775 }
9776 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9777 avail_ae_modes.array(),
9778 avail_ae_modes.size());
9779
9780 int32_t sensitivity_range[2];
9781 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9782 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9783 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9784 sensitivity_range,
9785 sizeof(sensitivity_range) / sizeof(int32_t));
9786
9787 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9788 &gCamCapability[cameraId]->max_analog_sensitivity,
9789 1);
9790
9791 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9792 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9793 &sensor_orientation,
9794 1);
9795
9796 int32_t max_output_streams[] = {
9797 MAX_STALLING_STREAMS,
9798 MAX_PROCESSED_STREAMS,
9799 MAX_RAW_STREAMS};
9800 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9801 max_output_streams,
9802 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9803
9804 uint8_t avail_leds = 0;
9805 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9806 &avail_leds, 0);
9807
9808 uint8_t focus_dist_calibrated;
9809 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9810 gCamCapability[cameraId]->focus_dist_calibrated);
9811 if (NAME_NOT_FOUND != val) {
9812 focus_dist_calibrated = (uint8_t)val;
9813 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9814 &focus_dist_calibrated, 1);
9815 }
9816
9817 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9818 size = 0;
9819 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9820 MAX_TEST_PATTERN_CNT);
9821 for (size_t i = 0; i < count; i++) {
9822 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9823 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9824 if (NAME_NOT_FOUND != testpatternMode) {
9825 avail_testpattern_modes[size] = testpatternMode;
9826 size++;
9827 }
9828 }
9829 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9830 avail_testpattern_modes,
9831 size);
9832
9833 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9834 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9835 &max_pipeline_depth,
9836 1);
9837
9838 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9839 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9840 &partial_result_count,
9841 1);
9842
9843 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9844 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9845
9846 Vector<uint8_t> available_capabilities;
9847 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9848 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9849 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9850 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9851 if (supportBurst) {
9852 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9853 }
9854 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9855 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9856 if (hfrEnable && available_hfr_configs.array()) {
9857 available_capabilities.add(
9858 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9859 }
9860
9861 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9862 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9863 }
9864 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9865 available_capabilities.array(),
9866 available_capabilities.size());
9867
9868    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9869    //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9870 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9871 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9872
9873 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9874 &aeLockAvailable, 1);
9875
9876    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9877    //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9878 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9879 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9880
9881 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9882 &awbLockAvailable, 1);
9883
9884 int32_t max_input_streams = 1;
9885 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9886 &max_input_streams,
9887 1);
9888
9889 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9890 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9891 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9892 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9893 HAL_PIXEL_FORMAT_YCbCr_420_888};
9894 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9895 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
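    // Concretely: both IMPLEMENTATION_DEFINED and YCbCr_420_888 inputs can be reprocessed
    // into BLOB or YCbCr_420_888 outputs, per the map contents above.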
9896
9897 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9898 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9899 &max_latency,
9900 1);
9901
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009902#ifndef USE_HAL_3_3
9903 int32_t isp_sensitivity_range[2];
9904 isp_sensitivity_range[0] =
9905 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9906 isp_sensitivity_range[1] =
9907 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9908 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9909 isp_sensitivity_range,
9910 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9911#endif
9912
Thierry Strudel3d639192016-09-09 11:52:26 -07009913 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9914 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9915 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9916 available_hot_pixel_modes,
9917 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9918
9919 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9920 ANDROID_SHADING_MODE_FAST,
9921 ANDROID_SHADING_MODE_HIGH_QUALITY};
9922 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9923 available_shading_modes,
9924 3);
9925
9926 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9927 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9928 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9929 available_lens_shading_map_modes,
9930 2);
9931
9932 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9933 ANDROID_EDGE_MODE_FAST,
9934 ANDROID_EDGE_MODE_HIGH_QUALITY,
9935 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9936 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9937 available_edge_modes,
9938 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9939
9940 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9941 ANDROID_NOISE_REDUCTION_MODE_FAST,
9942 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9943 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9944 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9945 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9946 available_noise_red_modes,
9947 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9948
9949 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9950 ANDROID_TONEMAP_MODE_FAST,
9951 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9952 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9953 available_tonemap_modes,
9954 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9955
9956 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9957 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9958 available_hot_pixel_map_modes,
9959 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9960
9961 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9962 gCamCapability[cameraId]->reference_illuminant1);
9963 if (NAME_NOT_FOUND != val) {
9964 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9965 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9966 }
9967
9968 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9969 gCamCapability[cameraId]->reference_illuminant2);
9970 if (NAME_NOT_FOUND != val) {
9971 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9972 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9973 }
9974
9975 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9976 (void *)gCamCapability[cameraId]->forward_matrix1,
9977 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9978
9979 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9980 (void *)gCamCapability[cameraId]->forward_matrix2,
9981 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9982
9983 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9984 (void *)gCamCapability[cameraId]->color_transform1,
9985 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9986
9987 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9988 (void *)gCamCapability[cameraId]->color_transform2,
9989 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9990
9991 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9992 (void *)gCamCapability[cameraId]->calibration_transform1,
9993 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9994
9995 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9996 (void *)gCamCapability[cameraId]->calibration_transform2,
9997 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9998
9999 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10000 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10001 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10002 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10003 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10004 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10005 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10006 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10007 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10008 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10009 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10010 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10011 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10012 ANDROID_JPEG_GPS_COORDINATES,
10013 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10014 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10015 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10016 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10017 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10018 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10019 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10020 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10021 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10022 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010023#ifndef USE_HAL_3_3
10024 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10025#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010026 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010027 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010028 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10029 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010030 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010031 /* DevCamDebug metadata request_keys_basic */
10032 DEVCAMDEBUG_META_ENABLE,
10033 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010034 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010035 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010036 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010037 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010038 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010039
10040 size_t request_keys_cnt =
10041 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10042 Vector<int32_t> available_request_keys;
10043 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10044 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10045 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10046 }
10047
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010048 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010049 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10050 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10051 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010052 }
10053
Thierry Strudel3d639192016-09-09 11:52:26 -070010054 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10055 available_request_keys.array(), available_request_keys.size());
10056
10057 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10058 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10059 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10060 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10061 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10062 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10063 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10064 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10065 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10066 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10067 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10068 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10069 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10070 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10071 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10072 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10073 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010074 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010075 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10076 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10077 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010078 ANDROID_STATISTICS_FACE_SCORES,
10079#ifndef USE_HAL_3_3
10080 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10081#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010082 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010083 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010084 // DevCamDebug metadata result_keys_basic
10085 DEVCAMDEBUG_META_ENABLE,
10086 // DevCamDebug metadata result_keys AF
10087 DEVCAMDEBUG_AF_LENS_POSITION,
10088 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10089 DEVCAMDEBUG_AF_TOF_DISTANCE,
10090 DEVCAMDEBUG_AF_LUMA,
10091 DEVCAMDEBUG_AF_HAF_STATE,
10092 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10093 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10094 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10095 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10096 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10097 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10098 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10099 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10100 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10101 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10102 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10103 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10104 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10105 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10106 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10107 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10108 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10109 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10110 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10111 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10112 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10113 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10114 // DevCamDebug metadata result_keys AEC
10115 DEVCAMDEBUG_AEC_TARGET_LUMA,
10116 DEVCAMDEBUG_AEC_COMP_LUMA,
10117 DEVCAMDEBUG_AEC_AVG_LUMA,
10118 DEVCAMDEBUG_AEC_CUR_LUMA,
10119 DEVCAMDEBUG_AEC_LINECOUNT,
10120 DEVCAMDEBUG_AEC_REAL_GAIN,
10121 DEVCAMDEBUG_AEC_EXP_INDEX,
10122 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010123 // DevCamDebug metadata result_keys zzHDR
10124 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10125 DEVCAMDEBUG_AEC_L_LINECOUNT,
10126 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10127 DEVCAMDEBUG_AEC_S_LINECOUNT,
10128 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10129 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10130 // DevCamDebug metadata result_keys ADRC
10131 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10132 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10133 DEVCAMDEBUG_AEC_GTM_RATIO,
10134 DEVCAMDEBUG_AEC_LTM_RATIO,
10135 DEVCAMDEBUG_AEC_LA_RATIO,
10136 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010137 // DevCamDebug metadata result_keys AWB
10138 DEVCAMDEBUG_AWB_R_GAIN,
10139 DEVCAMDEBUG_AWB_G_GAIN,
10140 DEVCAMDEBUG_AWB_B_GAIN,
10141 DEVCAMDEBUG_AWB_CCT,
10142 DEVCAMDEBUG_AWB_DECISION,
10143 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010144 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10145 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10146 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010147 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010148 };
10149
Thierry Strudel3d639192016-09-09 11:52:26 -070010150 size_t result_keys_cnt =
10151 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10152
10153 Vector<int32_t> available_result_keys;
10154 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10155 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10156 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10157 }
10158 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10159 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10160 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10161 }
10162 if (supportedFaceDetectMode == 1) {
10163 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10164 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10165 } else if ((supportedFaceDetectMode == 2) ||
10166 (supportedFaceDetectMode == 3)) {
10167 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10168 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10169 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010170#ifndef USE_HAL_3_3
10171 if (hasBlackRegions) {
10172 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10173 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10174 }
10175#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010176
10177 if (gExposeEnableZslKey) {
10178 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10179 }
10180
Thierry Strudel3d639192016-09-09 11:52:26 -070010181 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10182 available_result_keys.array(), available_result_keys.size());
10183
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010184 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010185 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10186 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10187 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10188 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10189 ANDROID_SCALER_CROPPING_TYPE,
10190 ANDROID_SYNC_MAX_LATENCY,
10191 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10192 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10193 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10194 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10195 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10196 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10197 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10198 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10199 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10200 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10201 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10202 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10203 ANDROID_LENS_FACING,
10204 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10205 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10206 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10207 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10208 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10209 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10210 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10211 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10212 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10213 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10214 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10215 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10216 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10217 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10218 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10219 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10220 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10221 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10222 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10223 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010224 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010225 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10226 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10227 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10228 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10229 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10230 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10231 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10232 ANDROID_CONTROL_AVAILABLE_MODES,
10233 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10234 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10235 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10236 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010237 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10238#ifndef USE_HAL_3_3
10239 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10240 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10241#endif
10242 };
10243
10244 Vector<int32_t> available_characteristics_keys;
10245 available_characteristics_keys.appendArray(characteristics_keys_basic,
10246 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10247#ifndef USE_HAL_3_3
10248 if (hasBlackRegions) {
10249 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10250 }
10251#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010252
10253 if (0 <= indexPD) {
10254 int32_t depthKeys[] = {
10255 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10256 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10257 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10258 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10259 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10260 };
10261 available_characteristics_keys.appendArray(depthKeys,
10262 sizeof(depthKeys) / sizeof(depthKeys[0]));
10263 }
10264
Thierry Strudel3d639192016-09-09 11:52:26 -070010265 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010266 available_characteristics_keys.array(),
10267 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010268
10269 /*available stall durations depend on the hw + sw and will be different for different devices */
10270 /*have to add for raw after implementation*/
10271 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10272 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10273
10274 Vector<int64_t> available_stall_durations;
10275 for (uint32_t j = 0; j < stall_formats_count; j++) {
10276 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10277 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10278 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10279 available_stall_durations.add(stall_formats[j]);
10280 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10281 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10282 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10283 }
10284 } else {
10285 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10286 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10287 available_stall_durations.add(stall_formats[j]);
10288 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10289 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10290 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10291 }
10292 }
10293 }
10294 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10295 available_stall_durations.array(),
10296 available_stall_durations.size());
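    // Layout: (format, width, height, stall_duration_ns) per entry; BLOB sizes use the JPEG
    // stall table and RAW16 sizes use the RAW16 stall table, as filled in above.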
10297
10298 //QCAMERA3_OPAQUE_RAW
10299 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10300 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10301 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10302 case LEGACY_RAW:
10303 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10304 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10305 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10306 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10307 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10308 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10309 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10310 break;
10311 case MIPI_RAW:
10312 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10313 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10314 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10315 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10316 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10317 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10318 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10319 break;
10320 default:
10321 LOGE("unknown opaque_raw_format %d",
10322 gCamCapability[cameraId]->opaque_raw_fmt);
10323 break;
10324 }
10325 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10326
10327 Vector<int32_t> strides;
10328 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10329 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10330 cam_stream_buf_plane_info_t buf_planes;
10331 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10332 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10333 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10334 &gCamCapability[cameraId]->padding_info, &buf_planes);
10335 strides.add(buf_planes.plane_info.mp[0].stride);
10336 }
10337 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10338 strides.size());
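    // Each QCAMERA3_OPAQUE_RAW_STRIDES entry is a (width, height, stride) triplet, with the
    // stride taken from the plane layout that mm_stream_calc_offset_raw computes for the
    // opaque RAW format selected above.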
10339
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010340 //TBD: remove the following line once backend advertises zzHDR in feature mask
10341 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010342 //Video HDR default
10343 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10344 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010345 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010346 int32_t vhdr_mode[] = {
10347 QCAMERA3_VIDEO_HDR_MODE_OFF,
10348 QCAMERA3_VIDEO_HDR_MODE_ON};
10349
10350 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10351 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10352 vhdr_mode, vhdr_mode_count);
10353 }
10354
Thierry Strudel3d639192016-09-09 11:52:26 -070010355 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10356 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10357 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10358
10359 uint8_t isMonoOnly =
10360 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10361 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10362 &isMonoOnly, 1);
10363
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010364#ifndef USE_HAL_3_3
10365 Vector<int32_t> opaque_size;
10366 for (size_t j = 0; j < scalar_formats_count; j++) {
10367 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10368 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10369 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10370 cam_stream_buf_plane_info_t buf_planes;
10371
10372 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10373 &gCamCapability[cameraId]->padding_info, &buf_planes);
10374
10375 if (rc == 0) {
10376 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10377 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10378 opaque_size.add(buf_planes.plane_info.frame_len);
10379                    } else {
10380 LOGE("raw frame calculation failed!");
10381 }
10382 }
10383 }
10384 }
10385
10386 if ((opaque_size.size() > 0) &&
10387 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10388 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10389 else
10390        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10391#endif
10392
Thierry Strudel04e026f2016-10-10 11:27:36 -070010393 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10394 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10395 size = 0;
10396 count = CAM_IR_MODE_MAX;
10397 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10398 for (size_t i = 0; i < count; i++) {
10399 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10400 gCamCapability[cameraId]->supported_ir_modes[i]);
10401 if (NAME_NOT_FOUND != val) {
10402 avail_ir_modes[size] = (int32_t)val;
10403 size++;
10404 }
10405 }
10406 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10407 avail_ir_modes, size);
10408 }
10409
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010410 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10411 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10412 size = 0;
10413 count = CAM_AEC_CONVERGENCE_MAX;
10414 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10415 for (size_t i = 0; i < count; i++) {
10416 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10417 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10418 if (NAME_NOT_FOUND != val) {
10419 available_instant_aec_modes[size] = (int32_t)val;
10420 size++;
10421 }
10422 }
10423 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10424 available_instant_aec_modes, size);
10425 }
10426
Thierry Strudel54dc9782017-02-15 12:12:10 -080010427 int32_t sharpness_range[] = {
10428 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10429 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10430 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10431
10432 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10433 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10434 size = 0;
10435 count = CAM_BINNING_CORRECTION_MODE_MAX;
10436 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10437 for (size_t i = 0; i < count; i++) {
10438 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10439 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10440 gCamCapability[cameraId]->supported_binning_modes[i]);
10441 if (NAME_NOT_FOUND != val) {
10442 avail_binning_modes[size] = (int32_t)val;
10443 size++;
10444 }
10445 }
10446 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10447 avail_binning_modes, size);
10448 }
10449
10450 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10451 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10452 size = 0;
10453 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10454 for (size_t i = 0; i < count; i++) {
10455 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10456 gCamCapability[cameraId]->supported_aec_modes[i]);
10457 if (NAME_NOT_FOUND != val)
10458 available_aec_modes[size++] = val;
10459 }
10460 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10461 available_aec_modes, size);
10462 }
10463
10464 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10465 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10466 size = 0;
10467 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10468 for (size_t i = 0; i < count; i++) {
10469 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10470 gCamCapability[cameraId]->supported_iso_modes[i]);
10471 if (NAME_NOT_FOUND != val)
10472 available_iso_modes[size++] = val;
10473 }
10474 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10475 available_iso_modes, size);
10476 }
10477
10478 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010479 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010480 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10481 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10482 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10483
10484 int32_t available_saturation_range[4];
10485 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10486 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10487 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10488 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10489 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10490 available_saturation_range, 4);
10491
10492 uint8_t is_hdr_values[2];
10493 is_hdr_values[0] = 0;
10494 is_hdr_values[1] = 1;
10495 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10496 is_hdr_values, 2);
10497
10498 float is_hdr_confidence_range[2];
10499 is_hdr_confidence_range[0] = 0.0;
10500 is_hdr_confidence_range[1] = 1.0;
10501 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10502 is_hdr_confidence_range, 2);
10503
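    // Export the EEPROM version string, appending an Easel presence marker
    // (",E:Y" / ",E:N") when there is room left in the buffer.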
Emilian Peev0a972ef2017-03-16 10:25:53 +000010504 size_t eepromLength = strnlen(
10505 reinterpret_cast<const char *>(
10506 gCamCapability[cameraId]->eeprom_version_info),
10507 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10508 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010509 char easelInfo[] = ",E:N";
10510 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10511 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10512 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010513 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10514 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010515 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010516 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10517 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10518 }
10519
Thierry Strudel3d639192016-09-09 11:52:26 -070010520 gStaticMetadata[cameraId] = staticInfo.release();
10521 return rc;
10522}
10523
10524/*===========================================================================
10525 * FUNCTION : makeTable
10526 *
10527 * DESCRIPTION: make a table of sizes
10528 *
10529 * PARAMETERS :
10530 *   @dimTable/@size/@max_size : source dimension table with its valid and maximum entry counts
10531 *   @sizeTable : output array receiving flattened (width, height) pairs
10532 *==========================================================================*/
10533void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10534 size_t max_size, int32_t *sizeTable)
10535{
10536 size_t j = 0;
10537 if (size > max_size) {
10538 size = max_size;
10539 }
10540 for (size_t i = 0; i < size; i++) {
10541 sizeTable[j] = dimTable[i].width;
10542 sizeTable[j+1] = dimTable[i].height;
10543 j+=2;
10544 }
10545}
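// Illustrative example for makeTable above (hypothetical sizes): a dimTable of
// {4032x3024, 1920x1080} is flattened into sizeTable as {4032, 3024, 1920, 1080},
// i.e. consecutive (width, height) pairs.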
10546
10547/*===========================================================================
10548 * FUNCTION : makeFPSTable
10549 *
10550 * DESCRIPTION: make a table of fps ranges
10551 *
10552 * PARAMETERS :
10553 *   @fpsTable/@size/@max_size : source fps range table and counts; @fpsRangesTable : flattened (min, max) int32 output
10554 *==========================================================================*/
10555void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10556 size_t max_size, int32_t *fpsRangesTable)
10557{
10558 size_t j = 0;
10559 if (size > max_size) {
10560 size = max_size;
10561 }
10562 for (size_t i = 0; i < size; i++) {
10563 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10564 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10565 j+=2;
10566 }
10567}
10568
10569/*===========================================================================
10570 * FUNCTION : makeOverridesList
10571 *
10572 * DESCRIPTION: make a list of scene mode overrides
10573 *
10574 * PARAMETERS :
10575 *   @overridesTable/@size/@max_size : per-scene-mode override table from the daemon and its counts
10576 *   @overridesList/@supported_indexes/@camera_id : output triplet list, framework-supported scene-mode indexes, camera id
10577 *==========================================================================*/
10578void QCamera3HardwareInterface::makeOverridesList(
10579 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10580 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10581{
10582    /* The daemon gives a list of overrides for all scene modes.
10583       However, we should send the framework only the overrides for the scene modes
10584       it supports. */
10585 size_t j = 0;
10586 if (size > max_size) {
10587 size = max_size;
10588 }
10589 size_t focus_count = CAM_FOCUS_MODE_MAX;
10590 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10591 focus_count);
10592 for (size_t i = 0; i < size; i++) {
10593 bool supt = false;
10594 size_t index = supported_indexes[i];
10595 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10596 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10597 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10598 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10599 overridesTable[index].awb_mode);
10600 if (NAME_NOT_FOUND != val) {
10601 overridesList[j+1] = (uint8_t)val;
10602 }
10603 uint8_t focus_override = overridesTable[index].af_mode;
10604 for (size_t k = 0; k < focus_count; k++) {
10605 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10606 supt = true;
10607 break;
10608 }
10609 }
10610 if (supt) {
10611 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10612 focus_override);
10613 if (NAME_NOT_FOUND != val) {
10614 overridesList[j+2] = (uint8_t)val;
10615 }
10616 } else {
10617 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10618 }
10619 j+=3;
10620 }
10621}
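// The triplets written above (AE mode, AWB mode, AF mode - three bytes per scene
// mode) are intended to back ANDROID_CONTROL_SCENE_MODE_OVERRIDES; focus overrides
// the sensor does not support fall back to AF_MODE_OFF.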
10622
10623/*===========================================================================
10624 * FUNCTION : filterJpegSizes
10625 *
10626 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that the
10627 *              active array can be downscaled to within the given downscale factor
10628 *
10629 * PARAMETERS :
10630 *
10631 * RETURN : length of jpegSizes array
10632 *==========================================================================*/
10633
10634size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10635 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10636 uint8_t downscale_factor)
10637{
10638 if (0 == downscale_factor) {
10639 downscale_factor = 1;
10640 }
10641
10642 int32_t min_width = active_array_size.width / downscale_factor;
10643 int32_t min_height = active_array_size.height / downscale_factor;
10644 size_t jpegSizesCnt = 0;
10645 if (processedSizesCnt > maxCount) {
10646 processedSizesCnt = maxCount;
10647 }
10648 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10649 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10650 jpegSizes[jpegSizesCnt] = processedSizes[i];
10651 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10652 jpegSizesCnt += 2;
10653 }
10654 }
10655 return jpegSizesCnt;
10656}
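// Worked example (illustrative numbers): with a 4032x3024 active array and a
// downscale_factor of 16, only processed sizes of at least 252x189 are copied into
// jpegSizes; note the return value counts int32 entries (two per size), not sizes.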
10657
10658/*===========================================================================
10659 * FUNCTION : computeNoiseModelEntryS
10660 *
10661 * DESCRIPTION: function to map a given sensitivity to the S noise
10662 * model parameters in the DNG noise model.
10663 *
10664 * PARAMETERS : sens : the sensor sensitivity
10665 *
10666 * RETURN : S (sensor amplification) noise
10667 *
10668 *==========================================================================*/
10669double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10670 double s = gCamCapability[mCameraId]->gradient_S * sens +
10671 gCamCapability[mCameraId]->offset_S;
10672 return ((s < 0.0) ? 0.0 : s);
10673}
10674
10675/*===========================================================================
10676 * FUNCTION : computeNoiseModelEntryO
10677 *
10678 * DESCRIPTION: function to map a given sensitivity to the O noise
10679 * model parameters in the DNG noise model.
10680 *
10681 * PARAMETERS : sens : the sensor sensitivity
10682 *
10683 * RETURN : O (sensor readout) noise
10684 *
10685 *==========================================================================*/
10686double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10687 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10688 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10689 1.0 : (1.0 * sens / max_analog_sens);
10690 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10691 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10692 return ((o < 0.0) ? 0.0 : o);
10693}
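// Taken together, computeNoiseModelEntryS/O supply the (S, O) pair of the DNG-style
// noise profile (noise(x) is modeled roughly as sqrt(S*x + O), as used for
// ANDROID_SENSOR_NOISE_PROFILE): S grows linearly with sensitivity, while O grows
// quadratically with sensitivity and its offset term additionally scales with the
// squared digital gain once the analog sensitivity limit is exceeded.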
10694
10695/*===========================================================================
10696 * FUNCTION : getSensorSensitivity
10697 *
10698 * DESCRIPTION: convert iso_mode to an integer value
10699 *
10700 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10701 *
10702 * RETURN : sensitivity supported by sensor
10703 *
10704 *==========================================================================*/
10705int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10706{
10707 int32_t sensitivity;
10708
10709 switch (iso_mode) {
10710 case CAM_ISO_MODE_100:
10711 sensitivity = 100;
10712 break;
10713 case CAM_ISO_MODE_200:
10714 sensitivity = 200;
10715 break;
10716 case CAM_ISO_MODE_400:
10717 sensitivity = 400;
10718 break;
10719 case CAM_ISO_MODE_800:
10720 sensitivity = 800;
10721 break;
10722 case CAM_ISO_MODE_1600:
10723 sensitivity = 1600;
10724 break;
10725 default:
10726 sensitivity = -1;
10727 break;
10728 }
10729 return sensitivity;
10730}
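// Usage note: only the fixed ISO modes handled above map to a concrete sensitivity
// (100..1600); any other iso_mode returns -1 so callers can skip it when building
// sensitivity tables.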
10731
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010732int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010733 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010734 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10735 // to connect to Easel.
10736 bool doNotpowerOnEasel =
10737 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10738
10739 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010740 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10741 return OK;
10742 }
10743
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010744 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010745 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010746 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010747 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010748 return res;
10749 }
10750
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010751 EaselManagerClientOpened = true;
10752
10753 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010754 if (res != OK) {
10755 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10756 }
10757
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010758 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010759 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010760
10761 // Expose enableZsl key only when HDR+ mode is enabled.
10762 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010763 }
10764
10765 return OK;
10766}
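// Testing note (property names as read above; behavior is a best-effort summary):
// "camera.hdrplus.donotpoweroneasel" keeps Easel powered off for external HDR+ tests,
// while e.g. "adb shell setprop persist.camera.hdrplus.enable 1" before the camera
// HAL (re)starts should select the non-bypass HDR+ path and expose the enableZsl key;
// "persist.camera.hdrplus.profiling" only toggles profiling.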
10767
Thierry Strudel3d639192016-09-09 11:52:26 -070010768/*===========================================================================
10769 * FUNCTION : getCamInfo
10770 *
10771 * DESCRIPTION: query camera capabilities
10772 *
10773 * PARAMETERS :
10774 * @cameraId : camera Id
10775 * @info : camera info struct to be filled in with camera capabilities
10776 *
10777 * RETURN : int type of status
10778 * NO_ERROR -- success
10779 * none-zero failure code
10780 *              non-zero failure code
10781int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10782 struct camera_info *info)
10783{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010784 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010785 int rc = 0;
10786
10787 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010788
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010789 {
10790 Mutex::Autolock l(gHdrPlusClientLock);
10791 rc = initHdrPlusClientLocked();
10792 if (rc != OK) {
10793 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10794 pthread_mutex_unlock(&gCamLock);
10795 return rc;
10796 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010797 }
10798
Thierry Strudel3d639192016-09-09 11:52:26 -070010799 if (NULL == gCamCapability[cameraId]) {
10800 rc = initCapabilities(cameraId);
10801 if (rc < 0) {
10802 pthread_mutex_unlock(&gCamLock);
10803 return rc;
10804 }
10805 }
10806
10807 if (NULL == gStaticMetadata[cameraId]) {
10808 rc = initStaticMetadata(cameraId);
10809 if (rc < 0) {
10810 pthread_mutex_unlock(&gCamLock);
10811 return rc;
10812 }
10813 }
10814
10815 switch(gCamCapability[cameraId]->position) {
10816 case CAM_POSITION_BACK:
10817 case CAM_POSITION_BACK_AUX:
10818 info->facing = CAMERA_FACING_BACK;
10819 break;
10820
10821 case CAM_POSITION_FRONT:
10822 case CAM_POSITION_FRONT_AUX:
10823 info->facing = CAMERA_FACING_FRONT;
10824 break;
10825
10826 default:
10827 LOGE("Unknown position type %d for camera id:%d",
10828 gCamCapability[cameraId]->position, cameraId);
10829 rc = -1;
10830 break;
10831 }
10832
10833
10834 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010835#ifndef USE_HAL_3_3
10836 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10837#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010839#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010840 info->static_camera_characteristics = gStaticMetadata[cameraId];
10841
10842 //For now assume both cameras can operate independently.
10843 info->conflicting_devices = NULL;
10844 info->conflicting_devices_length = 0;
10845
10846 //resource cost is 100 * MIN(1.0, m/M),
10847    //where m is the throughput requirement with the maximum stream configuration
10848    //and M is the CPP maximum throughput.
10849 float max_fps = 0.0;
10850 for (uint32_t i = 0;
10851 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10852 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10853 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10854 }
10855 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10856 gCamCapability[cameraId]->active_array_size.width *
10857 gCamCapability[cameraId]->active_array_size.height * max_fps /
10858 gCamCapability[cameraId]->max_pixel_bandwidth;
10859 info->resource_cost = 100 * MIN(1.0, ratio);
10860 LOGI("camera %d resource cost is %d", cameraId,
10861 info->resource_cost);
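    // Illustrative arithmetic (hypothetical numbers): a 4032x3024 active array at a
    // 30 fps maximum with MAX_PROCESSED_STREAMS = 3 needs ~1.10e9 pixels/s; against a
    // max_pixel_bandwidth of 1.2e9 this gives a ratio of ~0.91 and a resource cost of 91.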
10862
10863 pthread_mutex_unlock(&gCamLock);
10864 return rc;
10865}
10866
10867/*===========================================================================
10868 * FUNCTION : translateCapabilityToMetadata
10869 *
10870 * DESCRIPTION: translate the capability into camera_metadata_t
10871 *
10872 * PARAMETERS : type of the request
10873 *
10874 *
10875 * RETURN : success: camera_metadata_t*
10876 * failure: NULL
10877 *
10878 *==========================================================================*/
10879camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10880{
10881 if (mDefaultMetadata[type] != NULL) {
10882 return mDefaultMetadata[type];
10883 }
10884 //first time we are handling this request
10885 //fill up the metadata structure using the wrapper class
10886 CameraMetadata settings;
10887 //translate from cam_capability_t to camera_metadata_tag_t
10888 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10889 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10890 int32_t defaultRequestID = 0;
10891 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10892
10893 /* OIS disable */
10894 char ois_prop[PROPERTY_VALUE_MAX];
10895 memset(ois_prop, 0, sizeof(ois_prop));
10896 property_get("persist.camera.ois.disable", ois_prop, "0");
10897 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10898
10899 /* Force video to use OIS */
10900 char videoOisProp[PROPERTY_VALUE_MAX];
10901 memset(videoOisProp, 0, sizeof(videoOisProp));
10902 property_get("persist.camera.ois.video", videoOisProp, "1");
10903 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010904
10905 // Hybrid AE enable/disable
10906 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10907 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10908 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10909 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10910
Thierry Strudel3d639192016-09-09 11:52:26 -070010911 uint8_t controlIntent = 0;
10912 uint8_t focusMode;
10913 uint8_t vsMode;
10914 uint8_t optStabMode;
10915 uint8_t cacMode;
10916 uint8_t edge_mode;
10917 uint8_t noise_red_mode;
10918 uint8_t tonemap_mode;
10919 bool highQualityModeEntryAvailable = FALSE;
10920 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010921 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010922 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10923 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010924 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010925 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010926 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010927
Thierry Strudel3d639192016-09-09 11:52:26 -070010928 switch (type) {
10929 case CAMERA3_TEMPLATE_PREVIEW:
10930 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10931 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10932 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10933 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10934 edge_mode = ANDROID_EDGE_MODE_FAST;
10935 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10936 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10937 break;
10938 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10939 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10940 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10941 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10942 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10943 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10944 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10945 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10946 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10947 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10948 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10949 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10950 highQualityModeEntryAvailable = TRUE;
10951 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10952 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10953 fastModeEntryAvailable = TRUE;
10954 }
10955 }
10956 if (highQualityModeEntryAvailable) {
10957 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10958 } else if (fastModeEntryAvailable) {
10959 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10960 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010961 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10962 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10963 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010964 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010965 break;
10966 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10967 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10968 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10969 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010970 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10971 edge_mode = ANDROID_EDGE_MODE_FAST;
10972 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10973 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10974 if (forceVideoOis)
10975 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10976 break;
10977 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10978 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10979 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10980 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10982 edge_mode = ANDROID_EDGE_MODE_FAST;
10983 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10984 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10985 if (forceVideoOis)
10986 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10987 break;
10988 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10989 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10990 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10991 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10992 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10993 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10994 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10995 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10996 break;
10997 case CAMERA3_TEMPLATE_MANUAL:
10998 edge_mode = ANDROID_EDGE_MODE_FAST;
10999 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11000 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11001 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11002 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11003 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11004 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11005 break;
11006 default:
11007 edge_mode = ANDROID_EDGE_MODE_FAST;
11008 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11009 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11010 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11011 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11012 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11013 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11014 break;
11015 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011016    // Set CAC to OFF if the underlying device doesn't support it
11017 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11018 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11019 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011020 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11021 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11022 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11023 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11024 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11025 }
11026 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011027 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011028 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011029
11030 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11031 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11032 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11033 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11034 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11035 || ois_disable)
11036 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11037 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011038 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011039
11040 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11041 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11042
11043 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11044 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11045
11046 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11047 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11048
11049 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11050 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11051
11052 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11053 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11054
11055 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11056 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11057
11058 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11059 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11060
11061 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11062 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11063
11064 /*flash*/
11065 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11066 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11067
11068 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11069 settings.update(ANDROID_FLASH_FIRING_POWER,
11070 &flashFiringLevel, 1);
11071
11072 /* lens */
11073 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11074 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11075
11076 if (gCamCapability[mCameraId]->filter_densities_count) {
11077 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11078 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11079 gCamCapability[mCameraId]->filter_densities_count);
11080 }
11081
11082 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11083 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11084
Thierry Strudel3d639192016-09-09 11:52:26 -070011085 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11086 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11087
11088 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11089 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11090
11091 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11092 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11093
11094 /* face detection (default to OFF) */
11095 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11096 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11097
Thierry Strudel54dc9782017-02-15 12:12:10 -080011098 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11099 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011100
11101 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11102 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11103
11104 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11105 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11106
Thierry Strudel3d639192016-09-09 11:52:26 -070011107
11108 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11109 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11110
11111 /* Exposure time(Update the Min Exposure Time)*/
11112 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11113 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11114
11115 /* frame duration */
11116 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11117 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11118
11119 /* sensitivity */
11120 static const int32_t default_sensitivity = 100;
11121 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011122#ifndef USE_HAL_3_3
11123 static const int32_t default_isp_sensitivity =
11124 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11125 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11126#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011127
11128 /*edge mode*/
11129 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11130
11131 /*noise reduction mode*/
11132 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11133
11134 /*color correction mode*/
11135 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11136 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11137
11138 /*transform matrix mode*/
11139 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11140
11141 int32_t scaler_crop_region[4];
11142 scaler_crop_region[0] = 0;
11143 scaler_crop_region[1] = 0;
11144 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11145 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11146 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11147
11148 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11149 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11150
11151 /*focus distance*/
11152 float focus_distance = 0.0;
11153 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11154
11155 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011156 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011157 float max_range = 0.0;
11158 float max_fixed_fps = 0.0;
11159 int32_t fps_range[2] = {0, 0};
11160 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11161 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011162 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11163 TEMPLATE_MAX_PREVIEW_FPS) {
11164 continue;
11165 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011166 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11167 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11168 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11169 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11170 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11171 if (range > max_range) {
11172 fps_range[0] =
11173 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11174 fps_range[1] =
11175 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11176 max_range = range;
11177 }
11178 } else {
11179 if (range < 0.01 && max_fixed_fps <
11180 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11181 fps_range[0] =
11182 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11183 fps_range[1] =
11184 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11185 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11186 }
11187 }
11188 }
11189 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
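    // Illustrative example (hypothetical ranges): if the sensor advertises [15,30] and
    // [30,30], preview/still/ZSL templates pick the widest range [15,30], while the
    // remaining templates pick the highest fixed range [30,30]; any range whose max
    // exceeds TEMPLATE_MAX_PREVIEW_FPS is skipped entirely.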
11190
11191 /*precapture trigger*/
11192 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11193 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11194
11195 /*af trigger*/
11196 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11197 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11198
11199 /* ae & af regions */
11200 int32_t active_region[] = {
11201 gCamCapability[mCameraId]->active_array_size.left,
11202 gCamCapability[mCameraId]->active_array_size.top,
11203 gCamCapability[mCameraId]->active_array_size.left +
11204 gCamCapability[mCameraId]->active_array_size.width,
11205 gCamCapability[mCameraId]->active_array_size.top +
11206 gCamCapability[mCameraId]->active_array_size.height,
11207 0};
11208 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11209 sizeof(active_region) / sizeof(active_region[0]));
11210 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11211 sizeof(active_region) / sizeof(active_region[0]));
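    // Per the framework's (x_min, y_min, x_max, y_max, weight) region layout, the
    // trailing 0 is the weight, leaving the default AE/AF region effectively unset.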
11212
11213 /* black level lock */
11214 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11215 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11216
Thierry Strudel3d639192016-09-09 11:52:26 -070011217 //special defaults for manual template
11218 if (type == CAMERA3_TEMPLATE_MANUAL) {
11219 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11220 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11221
11222 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11223 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11224
11225 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11226 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11227
11228 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11229 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11230
11231 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11232 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11233
11234 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11235 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11236 }
11237
11238
11239 /* TNR
11240     * This is where we decide for which templates TNR is enabled by default.
11241     * TNR is enabled if either the preview or the video stream requires it.
11242     * This is not to be confused with per-stream linking; that decision is still
11243     * made per session and is handled as part of stream configuration.
11244 */
11245 uint8_t tnr_enable = 0;
11246
11247 if (m_bTnrPreview || m_bTnrVideo) {
11248
11249 switch (type) {
11250 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11251 tnr_enable = 1;
11252 break;
11253
11254 default:
11255 tnr_enable = 0;
11256 break;
11257 }
11258
11259 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11260 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11261 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11262
11263 LOGD("TNR:%d with process plate %d for template:%d",
11264 tnr_enable, tnr_process_type, type);
11265 }
11266
11267 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011268 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011269 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11270
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011271 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011272 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11273
Shuzhen Wang920ea402017-05-03 08:49:39 -070011274 uint8_t related_camera_id = mCameraId;
11275 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011276
11277 /* CDS default */
11278 char prop[PROPERTY_VALUE_MAX];
11279 memset(prop, 0, sizeof(prop));
11280 property_get("persist.camera.CDS", prop, "Auto");
11281 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11282 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11283 if (CAM_CDS_MODE_MAX == cds_mode) {
11284 cds_mode = CAM_CDS_MODE_AUTO;
11285 }
11286
11287 /* Disabling CDS in templates which have TNR enabled*/
11288 if (tnr_enable)
11289 cds_mode = CAM_CDS_MODE_OFF;
11290
11291 int32_t mode = cds_mode;
11292 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011293
Thierry Strudel269c81a2016-10-12 12:13:59 -070011294 /* Manual Convergence AEC Speed is disabled by default*/
11295 float default_aec_speed = 0;
11296 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11297
11298 /* Manual Convergence AWB Speed is disabled by default*/
11299 float default_awb_speed = 0;
11300 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11301
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011302 // Set instant AEC to normal convergence by default
11303 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11304 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11305
Shuzhen Wang19463d72016-03-08 11:09:52 -080011306 /* hybrid ae */
11307 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11308
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011309 if (gExposeEnableZslKey) {
11310 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11311 }
11312
Thierry Strudel3d639192016-09-09 11:52:26 -070011313 mDefaultMetadata[type] = settings.release();
11314
11315 return mDefaultMetadata[type];
11316}
11317
11318/*===========================================================================
11319 * FUNCTION : setFrameParameters
11320 *
11321 * DESCRIPTION: set parameters per frame as requested in the metadata from
11322 * framework
11323 *
11324 * PARAMETERS :
11325 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011326 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011327 * @blob_request: Whether this request is a blob request or not
11328 *
11329 * RETURN : success: NO_ERROR
11330 * failure:
11331 *==========================================================================*/
11332int QCamera3HardwareInterface::setFrameParameters(
11333 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011334 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011335 int blob_request,
11336 uint32_t snapshotStreamId)
11337{
11338 /*translate from camera_metadata_t type to parm_type_t*/
11339 int rc = 0;
11340 int32_t hal_version = CAM_HAL_V3;
11341
11342 clear_metadata_buffer(mParameters);
11343 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11344 LOGE("Failed to set hal version in the parameters");
11345 return BAD_VALUE;
11346 }
11347
11348 /*we need to update the frame number in the parameters*/
11349 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11350 request->frame_number)) {
11351 LOGE("Failed to set the frame number in the parameters");
11352 return BAD_VALUE;
11353 }
11354
11355 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011357 LOGE("Failed to set stream type mask in the parameters");
11358 return BAD_VALUE;
11359 }
11360
11361 if (mUpdateDebugLevel) {
11362 uint32_t dummyDebugLevel = 0;
11363        /* The value of dummyDebugLevel is irrelevant. Setting
11364         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property. */
11365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11366 dummyDebugLevel)) {
11367 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11368 return BAD_VALUE;
11369 }
11370 mUpdateDebugLevel = false;
11371 }
11372
11373 if(request->settings != NULL){
11374 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11375 if (blob_request)
11376 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11377 }
11378
11379 return rc;
11380}
11381
11382/*===========================================================================
11383 * FUNCTION : setReprocParameters
11384 *
11385 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11386 * return it.
11387 *
11388 * PARAMETERS :
11389 * @request : request that needs to be serviced
11390 *
11391 * RETURN : success: NO_ERROR
11392 * failure:
11393 *==========================================================================*/
11394int32_t QCamera3HardwareInterface::setReprocParameters(
11395 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11396 uint32_t snapshotStreamId)
11397{
11398 /*translate from camera_metadata_t type to parm_type_t*/
11399 int rc = 0;
11400
11401 if (NULL == request->settings){
11402 LOGE("Reprocess settings cannot be NULL");
11403 return BAD_VALUE;
11404 }
11405
11406 if (NULL == reprocParam) {
11407 LOGE("Invalid reprocessing metadata buffer");
11408 return BAD_VALUE;
11409 }
11410 clear_metadata_buffer(reprocParam);
11411
11412 /*we need to update the frame number in the parameters*/
11413 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11414 request->frame_number)) {
11415 LOGE("Failed to set the frame number in the parameters");
11416 return BAD_VALUE;
11417 }
11418
11419 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11420 if (rc < 0) {
11421 LOGE("Failed to translate reproc request");
11422 return rc;
11423 }
11424
11425 CameraMetadata frame_settings;
11426 frame_settings = request->settings;
11427 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11428 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11429 int32_t *crop_count =
11430 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11431 int32_t *crop_data =
11432 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11433 int32_t *roi_map =
11434 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11435 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11436 cam_crop_data_t crop_meta;
11437 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11438 crop_meta.num_of_streams = 1;
11439 crop_meta.crop_info[0].crop.left = crop_data[0];
11440 crop_meta.crop_info[0].crop.top = crop_data[1];
11441 crop_meta.crop_info[0].crop.width = crop_data[2];
11442 crop_meta.crop_info[0].crop.height = crop_data[3];
11443
11444 crop_meta.crop_info[0].roi_map.left =
11445 roi_map[0];
11446 crop_meta.crop_info[0].roi_map.top =
11447 roi_map[1];
11448 crop_meta.crop_info[0].roi_map.width =
11449 roi_map[2];
11450 crop_meta.crop_info[0].roi_map.height =
11451 roi_map[3];
11452
11453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11454 rc = BAD_VALUE;
11455 }
11456 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11457 request->input_buffer->stream,
11458 crop_meta.crop_info[0].crop.left,
11459 crop_meta.crop_info[0].crop.top,
11460 crop_meta.crop_info[0].crop.width,
11461 crop_meta.crop_info[0].crop.height);
11462 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11463 request->input_buffer->stream,
11464 crop_meta.crop_info[0].roi_map.left,
11465 crop_meta.crop_info[0].roi_map.top,
11466 crop_meta.crop_info[0].roi_map.width,
11467 crop_meta.crop_info[0].roi_map.height);
11468 } else {
11469 LOGE("Invalid reprocess crop count %d!", *crop_count);
11470 }
11471 } else {
11472 LOGE("No crop data from matching output stream");
11473 }
11474
11475    /* These settings are not needed for regular requests, so handle them specially for
11476       reprocess requests; they carry information needed for the EXIF tags. */
11477 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11478 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11479 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11480 if (NAME_NOT_FOUND != val) {
11481 uint32_t flashMode = (uint32_t)val;
11482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11483 rc = BAD_VALUE;
11484 }
11485 } else {
11486 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11487 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11488 }
11489 } else {
11490 LOGH("No flash mode in reprocess settings");
11491 }
11492
11493 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11494 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11496 rc = BAD_VALUE;
11497 }
11498 } else {
11499 LOGH("No flash state in reprocess settings");
11500 }
11501
11502 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11503 uint8_t *reprocessFlags =
11504 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11506 *reprocessFlags)) {
11507 rc = BAD_VALUE;
11508 }
11509 }
11510
Thierry Strudel54dc9782017-02-15 12:12:10 -080011511 // Add exif debug data to internal metadata
11512 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11513 mm_jpeg_debug_exif_params_t *debug_params =
11514 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11515 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11516 // AE
11517 if (debug_params->ae_debug_params_valid == TRUE) {
11518 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11519 debug_params->ae_debug_params);
11520 }
11521 // AWB
11522 if (debug_params->awb_debug_params_valid == TRUE) {
11523 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11524 debug_params->awb_debug_params);
11525 }
11526 // AF
11527 if (debug_params->af_debug_params_valid == TRUE) {
11528 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11529 debug_params->af_debug_params);
11530 }
11531 // ASD
11532 if (debug_params->asd_debug_params_valid == TRUE) {
11533 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11534 debug_params->asd_debug_params);
11535 }
11536 // Stats
11537 if (debug_params->stats_debug_params_valid == TRUE) {
11538 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11539 debug_params->stats_debug_params);
11540 }
11541 // BE Stats
11542 if (debug_params->bestats_debug_params_valid == TRUE) {
11543 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11544 debug_params->bestats_debug_params);
11545 }
11546 // BHIST
11547 if (debug_params->bhist_debug_params_valid == TRUE) {
11548 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11549 debug_params->bhist_debug_params);
11550 }
11551 // 3A Tuning
11552 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11553 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11554 debug_params->q3a_tuning_debug_params);
11555 }
11556 }
11557
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011558 // Add metadata which reprocess needs
11559 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11560 cam_reprocess_info_t *repro_info =
11561 (cam_reprocess_info_t *)frame_settings.find
11562 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011563 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011564 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011566 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011567 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011568 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011569 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011570 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011571 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011572 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011573 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011574 repro_info->pipeline_flip);
11575 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11576 repro_info->af_roi);
11577 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11578 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011579 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
11580           CAM_INTF_PARM_ROTATION metadata has already been added in
11581           translateToHalMetadata and HAL needs to keep this new rotation
11582           metadata. Otherwise, the old rotation info saved in the vendor tag
11583           is used. */
11584 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11585 CAM_INTF_PARM_ROTATION, reprocParam) {
11586 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11587 } else {
11588 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011589 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011590 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011591 }
11592
11593    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11594       to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11595       roi.width and roi.height are the final JPEG size.
11596       For now, HAL only checks this for reprocess requests. */
11597 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11598 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11599 uint8_t *enable =
11600 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11601 if (*enable == TRUE) {
11602 int32_t *crop_data =
11603 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11604 cam_stream_crop_info_t crop_meta;
11605 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11606 crop_meta.stream_id = 0;
11607 crop_meta.crop.left = crop_data[0];
11608 crop_meta.crop.top = crop_data[1];
11609 crop_meta.crop.width = crop_data[2];
11610 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011611 // The JPEG crop roi should match cpp output size
11612 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11613 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11614 crop_meta.roi_map.left = 0;
11615 crop_meta.roi_map.top = 0;
11616 crop_meta.roi_map.width = cpp_crop->crop.width;
11617 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011618 }
11619 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11620 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011621 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011623 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11624 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011625 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011626 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11627
11628 // Add JPEG scale information
11629 cam_dimension_t scale_dim;
11630 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11631 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11632 int32_t *roi =
11633 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11634 scale_dim.width = roi[2];
11635 scale_dim.height = roi[3];
11636 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11637 scale_dim);
11638 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11639 scale_dim.width, scale_dim.height, mCameraId);
11640 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011641 }
11642 }
11643
11644 return rc;
11645}
11646
11647/*===========================================================================
11648 * FUNCTION : saveRequestSettings
11649 *
11650 * DESCRIPTION: Add any settings that might have changed to the request settings
11651 * and save the settings to be applied on the frame
11652 *
11653 * PARAMETERS :
11654 * @jpegMetadata : the extracted and/or modified jpeg metadata
11655 * @request : request with initial settings
11656 *
11657 * RETURN :
11658 * camera_metadata_t* : pointer to the saved request settings
11659 *==========================================================================*/
11660camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11661 const CameraMetadata &jpegMetadata,
11662 camera3_capture_request_t *request)
11663{
11664 camera_metadata_t *resultMetadata;
11665 CameraMetadata camMetadata;
11666 camMetadata = request->settings;
11667
11668 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11669 int32_t thumbnail_size[2];
11670 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11671 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11672 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11673 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11674 }
11675
11676 if (request->input_buffer != NULL) {
11677 uint8_t reprocessFlags = 1;
11678 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11679 (uint8_t*)&reprocessFlags,
11680 sizeof(reprocessFlags));
11681 }
11682
11683 resultMetadata = camMetadata.release();
11684 return resultMetadata;
11685}
11686
11687/*===========================================================================
11688 * FUNCTION : setHalFpsRange
11689 *
11690 * DESCRIPTION: set FPS range parameter
11691 *
11692 *
11693 * PARAMETERS :
11694 * @settings : Metadata from framework
11695 * @hal_metadata: Metadata buffer
11696 *
11697 *
11698 * RETURN : success: NO_ERROR
11699 *              failure: BAD_VALUE
11700 *==========================================================================*/
11701int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11702 metadata_buffer_t *hal_metadata)
11703{
11704 int32_t rc = NO_ERROR;
11705 cam_fps_range_t fps_range;
11706 fps_range.min_fps = (float)
11707 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11708 fps_range.max_fps = (float)
11709 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11710 fps_range.video_min_fps = fps_range.min_fps;
11711 fps_range.video_max_fps = fps_range.max_fps;
11712
11713 LOGD("aeTargetFpsRange fps: [%f %f]",
11714 fps_range.min_fps, fps_range.max_fps);
11715 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11716 * follows:
11717 * ---------------------------------------------------------------|
11718 * Video stream is absent in configure_streams |
11719 *   (Camcorder preview before the first video record)           |
11720 * ---------------------------------------------------------------|
11721 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11722 * | | | vid_min/max_fps|
11723 * ---------------------------------------------------------------|
11724 * NO | [ 30, 240] | 240 | [240, 240] |
11725 * |-------------|-------------|----------------|
11726 * | [240, 240] | 240 | [240, 240] |
11727 * ---------------------------------------------------------------|
11728 * Video stream is present in configure_streams |
11729 * ---------------------------------------------------------------|
11730 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11731 * | | | vid_min/max_fps|
11732 * ---------------------------------------------------------------|
11733 * NO | [ 30, 240] | 240 | [240, 240] |
11734 * (camcorder prev |-------------|-------------|----------------|
11735 * after video rec | [240, 240] | 240 | [240, 240] |
11736 * is stopped) | | | |
11737 * ---------------------------------------------------------------|
11738 * YES | [ 30, 240] | 240 | [240, 240] |
11739 * |-------------|-------------|----------------|
11740 * | [240, 240] | 240 | [240, 240] |
11741 * ---------------------------------------------------------------|
11742 * When Video stream is absent in configure_streams,
11743 * preview fps = sensor_fps / batchsize
11744 * Eg: for 240fps at batchSize 4, preview = 60fps
11745 * for 120fps at batchSize 4, preview = 30fps
11746 *
11747 * When video stream is present in configure_streams, preview fps is as per
11748 * the ratio of preview buffers to video buffers requested in process
11749 * capture request
11750 */
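    /* Worked example of the formula above (numbers are illustrative): with
     * sensor_fps = 240 and batchSize = 4, preview runs at 240 / 4 = 60 fps. Below,
     * mBatchSize is derived as mHFRVideoFps / PREVIEW_FPS_FOR_HFR and then clamped
     * to MAX_HFR_BATCH_SIZE. */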
11751 mBatchSize = 0;
11752 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11753 fps_range.min_fps = fps_range.video_max_fps;
11754 fps_range.video_min_fps = fps_range.video_max_fps;
11755 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11756 fps_range.max_fps);
11757 if (NAME_NOT_FOUND != val) {
11758 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11760 return BAD_VALUE;
11761 }
11762
11763 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11764 /* If batchmode is currently in progress and the fps changes,
11765 * set the flag to restart the sensor */
11766 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11767 (mHFRVideoFps != fps_range.max_fps)) {
11768 mNeedSensorRestart = true;
11769 }
11770 mHFRVideoFps = fps_range.max_fps;
11771 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11772 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11773 mBatchSize = MAX_HFR_BATCH_SIZE;
11774 }
11775 }
11776 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11777
11778 }
11779 } else {
11780 /* HFR mode is session param in backend/ISP. This should be reset when
11781 * in non-HFR mode */
11782 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11783 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11784 return BAD_VALUE;
11785 }
11786 }
11787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11788 return BAD_VALUE;
11789 }
11790 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11791 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11792 return rc;
11793}
11794
11795/*===========================================================================
11796 * FUNCTION : translateToHalMetadata
11797 *
11798 * DESCRIPTION: translate framework camera_metadata_t settings into HAL metadata_buffer_t entries
11799 *
11800 *
11801 * PARAMETERS :
11802 * @request : request sent from framework
11803 *
11804 *
11805 * RETURN : success: NO_ERROR
11806 *              failure: BAD_VALUE
11807 *==========================================================================*/
11808int QCamera3HardwareInterface::translateToHalMetadata
11809 (const camera3_capture_request_t *request,
11810 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011811 uint32_t snapshotStreamId) {
11812 if (request == nullptr || hal_metadata == nullptr) {
11813 return BAD_VALUE;
11814 }
11815
11816 int64_t minFrameDuration = getMinFrameDuration(request);
11817
11818 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11819 minFrameDuration);
11820}
11821
11822int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11823 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11824 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11825
Thierry Strudel3d639192016-09-09 11:52:26 -070011826 int rc = 0;
11827 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011828 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011829
11830 /* Do not change the order of the following list unless you know what you are
11831 * doing.
11832     * The order is laid out so that parameters earlier in the list may be used to
11833     * override parameters that appear later in the list. Examples are:
11834     * 1. META_MODE should precede AEC/AWB/AF MODE
11835     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11836     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11837     * 4. Any mode should precede its corresponding settings
11838 */
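    /* For instance, CAM_INTF_META_MODE is batched first below, followed by the AEC/AWB/AF
     * mode entries, and each mode entry is batched before the settings it governs
     * (e.g. AE mode before exposure time and sensitivity). */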
11839 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11840 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11842 rc = BAD_VALUE;
11843 }
11844 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11845 if (rc != NO_ERROR) {
11846 LOGE("extractSceneMode failed");
11847 }
11848 }
11849
11850 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11851 uint8_t fwk_aeMode =
11852 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11853 uint8_t aeMode;
11854 int32_t redeye;
11855
11856 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11857 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011858 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11859 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011860 } else {
11861 aeMode = CAM_AE_MODE_ON;
11862 }
11863 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11864 redeye = 1;
11865 } else {
11866 redeye = 0;
11867 }
11868
11869 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11870 fwk_aeMode);
11871 if (NAME_NOT_FOUND != val) {
11872 int32_t flashMode = (int32_t)val;
11873 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11874 }
11875
11876 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11877 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11878 rc = BAD_VALUE;
11879 }
11880 }
11881
11882 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11883 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11884 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11885 fwk_whiteLevel);
11886 if (NAME_NOT_FOUND != val) {
11887 uint8_t whiteLevel = (uint8_t)val;
11888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11889 rc = BAD_VALUE;
11890 }
11891 }
11892 }
11893
11894 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11895 uint8_t fwk_cacMode =
11896 frame_settings.find(
11897 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11898 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11899 fwk_cacMode);
11900 if (NAME_NOT_FOUND != val) {
11901 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11902 bool entryAvailable = FALSE;
11903 // Check whether Frameworks set CAC mode is supported in device or not
11904 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11905 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11906 entryAvailable = TRUE;
11907 break;
11908 }
11909 }
11910 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11911            // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11912 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11913 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11914 if (entryAvailable == FALSE) {
11915 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11916 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11917 } else {
11918 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11919                    // HIGH_QUALITY is not supported, so set FAST since the spec says the underlying
11920                    // device implementation can be the same for both modes.
11921 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11922 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11923                    // FAST is not supported either, so choose OFF instead of HIGH or FAST
11924                    // in order to avoid the fps drop caused by high quality processing
11925 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11926 } else {
11927 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11928 }
11929 }
11930 }
11931 LOGD("Final cacMode is %d", cacMode);
11932 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11933 rc = BAD_VALUE;
11934 }
11935 } else {
11936 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11937 }
11938 }
11939
Thierry Strudel2896d122017-02-23 19:18:03 -080011940 char af_value[PROPERTY_VALUE_MAX];
11941 property_get("persist.camera.af.infinity", af_value, "0");
11942
Jason Lee84ae9972017-02-24 13:24:24 -080011943 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011944 if (atoi(af_value) == 0) {
11945 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011946 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011947 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11948 fwk_focusMode);
11949 if (NAME_NOT_FOUND != val) {
11950 uint8_t focusMode = (uint8_t)val;
11951 LOGD("set focus mode %d", focusMode);
11952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11953 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11954 rc = BAD_VALUE;
11955 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011956 }
11957 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011958 } else {
11959 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11960 LOGE("Focus forced to infinity %d", focusMode);
11961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11962 rc = BAD_VALUE;
11963 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011964 }
11965
Jason Lee84ae9972017-02-24 13:24:24 -080011966 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11967 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011968 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11970 focalDistance)) {
11971 rc = BAD_VALUE;
11972 }
11973 }
11974
11975 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11976 uint8_t fwk_antibandingMode =
11977 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11978 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11979 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11980 if (NAME_NOT_FOUND != val) {
11981 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011982 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11983 if (m60HzZone) {
11984 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11985 } else {
11986 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11987 }
11988 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11990 hal_antibandingMode)) {
11991 rc = BAD_VALUE;
11992 }
11993 }
11994 }
11995
11996 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11997 int32_t expCompensation = frame_settings.find(
11998 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11999 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12000 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12001 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12002 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012003 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12005 expCompensation)) {
12006 rc = BAD_VALUE;
12007 }
12008 }
12009
12010 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12011 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12012 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12013 rc = BAD_VALUE;
12014 }
12015 }
12016 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12017 rc = setHalFpsRange(frame_settings, hal_metadata);
12018 if (rc != NO_ERROR) {
12019 LOGE("setHalFpsRange failed");
12020 }
12021 }
12022
12023 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12024 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12025 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12026 rc = BAD_VALUE;
12027 }
12028 }
12029
12030 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12031 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12032 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12033 fwk_effectMode);
12034 if (NAME_NOT_FOUND != val) {
12035 uint8_t effectMode = (uint8_t)val;
12036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12037 rc = BAD_VALUE;
12038 }
12039 }
12040 }
12041
12042 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12043 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12045 colorCorrectMode)) {
12046 rc = BAD_VALUE;
12047 }
12048 }
12049
12050 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12051 cam_color_correct_gains_t colorCorrectGains;
12052 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12053 colorCorrectGains.gains[i] =
12054 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12055 }
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12057 colorCorrectGains)) {
12058 rc = BAD_VALUE;
12059 }
12060 }
12061
12062 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12063 cam_color_correct_matrix_t colorCorrectTransform;
12064 cam_rational_type_t transform_elem;
12065 size_t num = 0;
12066 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12067 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12068 transform_elem.numerator =
12069 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12070 transform_elem.denominator =
12071 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12072 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12073 num++;
12074 }
12075 }
12076 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12077 colorCorrectTransform)) {
12078 rc = BAD_VALUE;
12079 }
12080 }
12081
12082 cam_trigger_t aecTrigger;
12083 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12084 aecTrigger.trigger_id = -1;
12085 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12086 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12087 aecTrigger.trigger =
12088 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12089 aecTrigger.trigger_id =
12090 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12091 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12092 aecTrigger)) {
12093 rc = BAD_VALUE;
12094 }
12095 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12096 aecTrigger.trigger, aecTrigger.trigger_id);
12097 }
12098
12099 /*af_trigger must come with a trigger id*/
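    /* The framework pairs ANDROID_CONTROL_AF_TRIGGER (e.g. START or CANCEL) with
     * ANDROID_CONTROL_AF_TRIGGER_ID; if either tag is absent, no AF trigger is forwarded
     * to the backend. */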
12100 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12101 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12102 cam_trigger_t af_trigger;
12103 af_trigger.trigger =
12104 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12105 af_trigger.trigger_id =
12106 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12108 rc = BAD_VALUE;
12109 }
12110 LOGD("AfTrigger: %d AfTriggerID: %d",
12111 af_trigger.trigger, af_trigger.trigger_id);
12112 }
12113
12114 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12115 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12116 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12117 rc = BAD_VALUE;
12118 }
12119 }
12120 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12121 cam_edge_application_t edge_application;
12122 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012123
Thierry Strudel3d639192016-09-09 11:52:26 -070012124 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12125 edge_application.sharpness = 0;
12126 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012127 edge_application.sharpness =
12128 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12129 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12130 int32_t sharpness =
12131 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12132 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12133 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12134 LOGD("Setting edge mode sharpness %d", sharpness);
12135 edge_application.sharpness = sharpness;
12136 }
12137 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012138 }
12139 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12140 rc = BAD_VALUE;
12141 }
12142 }
12143
12144 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12145 int32_t respectFlashMode = 1;
12146 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12147 uint8_t fwk_aeMode =
12148 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012149 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12150 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12151 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012152 respectFlashMode = 0;
12153 LOGH("AE Mode controls flash, ignore android.flash.mode");
12154 }
12155 }
12156 if (respectFlashMode) {
12157 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12158 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12159 LOGH("flash mode after mapping %d", val);
12160 // To check: CAM_INTF_META_FLASH_MODE usage
12161 if (NAME_NOT_FOUND != val) {
12162 uint8_t flashMode = (uint8_t)val;
12163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12164 rc = BAD_VALUE;
12165 }
12166 }
12167 }
12168 }
12169
12170 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12171 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12172 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12173 rc = BAD_VALUE;
12174 }
12175 }
12176
12177 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12178 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12180 flashFiringTime)) {
12181 rc = BAD_VALUE;
12182 }
12183 }
12184
12185 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12186 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12188 hotPixelMode)) {
12189 rc = BAD_VALUE;
12190 }
12191 }
12192
12193 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12194 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12195 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12196 lensAperture)) {
12197 rc = BAD_VALUE;
12198 }
12199 }
12200
12201 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12202 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12204 filterDensity)) {
12205 rc = BAD_VALUE;
12206 }
12207 }
12208
12209 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12210 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12212 focalLength)) {
12213 rc = BAD_VALUE;
12214 }
12215 }
12216
12217 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12218 uint8_t optStabMode =
12219 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12221 optStabMode)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225
12226 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12227 uint8_t videoStabMode =
12228 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12229 LOGD("videoStabMode from APP = %d", videoStabMode);
12230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12231 videoStabMode)) {
12232 rc = BAD_VALUE;
12233 }
12234 }
12235
12236
12237 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12238 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12240 noiseRedMode)) {
12241 rc = BAD_VALUE;
12242 }
12243 }
12244
12245 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12246 float reprocessEffectiveExposureFactor =
12247 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12249 reprocessEffectiveExposureFactor)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
12253
12254 cam_crop_region_t scalerCropRegion;
12255 bool scalerCropSet = false;
12256 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12257 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12258 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12259 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12260 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12261
12262 // Map coordinate system from active array to sensor output.
12263 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12264 scalerCropRegion.width, scalerCropRegion.height);
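        // Illustrative mapping (dimensions are hypothetical): with a 4000x3000 active array
        // and a 2000x1500 sensor output, an active-array crop of (1000, 750, 2000, 1500)
        // maps to roughly (500, 375, 1000, 750) in sensor coordinates.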
12265
12266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12267 scalerCropRegion)) {
12268 rc = BAD_VALUE;
12269 }
12270 scalerCropSet = true;
12271 }
12272
12273 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12274 int64_t sensorExpTime =
12275 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12276 LOGD("setting sensorExpTime %lld", sensorExpTime);
12277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12278 sensorExpTime)) {
12279 rc = BAD_VALUE;
12280 }
12281 }
12282
12283 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12284 int64_t sensorFrameDuration =
12285 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012286 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12287 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12288 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12289 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12291 sensorFrameDuration)) {
12292 rc = BAD_VALUE;
12293 }
12294 }
12295
12296 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12297 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12298 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12299 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12300 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12301 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12302 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12304 sensorSensitivity)) {
12305 rc = BAD_VALUE;
12306 }
12307 }
12308
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012309#ifndef USE_HAL_3_3
12310 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12311 int32_t ispSensitivity =
12312 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12313 if (ispSensitivity <
12314 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12315 ispSensitivity =
12316 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12317 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12318 }
12319 if (ispSensitivity >
12320 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12321 ispSensitivity =
12322 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12323 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12324 }
12325 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12326 ispSensitivity)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330#endif
12331
Thierry Strudel3d639192016-09-09 11:52:26 -070012332 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12333 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12334 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12335 rc = BAD_VALUE;
12336 }
12337 }
12338
12339 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12340 uint8_t fwk_facedetectMode =
12341 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12342
12343 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12344 fwk_facedetectMode);
12345
12346 if (NAME_NOT_FOUND != val) {
12347 uint8_t facedetectMode = (uint8_t)val;
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12349 facedetectMode)) {
12350 rc = BAD_VALUE;
12351 }
12352 }
12353 }
12354
Thierry Strudel54dc9782017-02-15 12:12:10 -080012355 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012356 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012357 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12359 histogramMode)) {
12360 rc = BAD_VALUE;
12361 }
12362 }
12363
12364 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12365 uint8_t sharpnessMapMode =
12366 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12367 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12368 sharpnessMapMode)) {
12369 rc = BAD_VALUE;
12370 }
12371 }
12372
12373 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12374 uint8_t tonemapMode =
12375 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12377 rc = BAD_VALUE;
12378 }
12379 }
12380    /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12381    /* All tonemap channels have the same number of points */
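    /* The curves arrive as flattened (Pin, Pout) pairs, i.e. data.f = {P0.in, P0.out,
     * P1.in, P1.out, ...}; hence tonemap_points_cnt below is count / 2 and each point is
     * read as two consecutive floats. */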
12382 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12383 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12384 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12385 cam_rgb_tonemap_curves tonemapCurves;
12386 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12387 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12388 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12389 tonemapCurves.tonemap_points_cnt,
12390 CAM_MAX_TONEMAP_CURVE_SIZE);
12391 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12392 }
12393
12394 /* ch0 = G*/
12395 size_t point = 0;
12396 cam_tonemap_curve_t tonemapCurveGreen;
12397 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12398 for (size_t j = 0; j < 2; j++) {
12399 tonemapCurveGreen.tonemap_points[i][j] =
12400 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12401 point++;
12402 }
12403 }
12404 tonemapCurves.curves[0] = tonemapCurveGreen;
12405
12406 /* ch 1 = B */
12407 point = 0;
12408 cam_tonemap_curve_t tonemapCurveBlue;
12409 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12410 for (size_t j = 0; j < 2; j++) {
12411 tonemapCurveBlue.tonemap_points[i][j] =
12412 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12413 point++;
12414 }
12415 }
12416 tonemapCurves.curves[1] = tonemapCurveBlue;
12417
12418 /* ch 2 = R */
12419 point = 0;
12420 cam_tonemap_curve_t tonemapCurveRed;
12421 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12422 for (size_t j = 0; j < 2; j++) {
12423 tonemapCurveRed.tonemap_points[i][j] =
12424 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12425 point++;
12426 }
12427 }
12428 tonemapCurves.curves[2] = tonemapCurveRed;
12429
12430 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12431 tonemapCurves)) {
12432 rc = BAD_VALUE;
12433 }
12434 }
12435
12436 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12437 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12439 captureIntent)) {
12440 rc = BAD_VALUE;
12441 }
12442 }
12443
12444 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12445 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12446 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12447 blackLevelLock)) {
12448 rc = BAD_VALUE;
12449 }
12450 }
12451
12452 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12453 uint8_t lensShadingMapMode =
12454 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12455 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12456 lensShadingMapMode)) {
12457 rc = BAD_VALUE;
12458 }
12459 }
12460
12461 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12462 cam_area_t roi;
12463 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012464 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012465
12466 // Map coordinate system from active array to sensor output.
12467 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12468 roi.rect.height);
12469
12470 if (scalerCropSet) {
12471 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12472 }
12473 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12479 cam_area_t roi;
12480 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012481 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012482
12483 // Map coordinate system from active array to sensor output.
12484 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12485 roi.rect.height);
12486
12487 if (scalerCropSet) {
12488 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12489 }
12490 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
12495 // CDS for non-HFR non-video mode
12496 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12497 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12498 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12499 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12500 LOGE("Invalid CDS mode %d!", *fwk_cds);
12501 } else {
12502 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12503 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12504 rc = BAD_VALUE;
12505 }
12506 }
12507 }
12508
Thierry Strudel04e026f2016-10-10 11:27:36 -070012509 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012510 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012511 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012512 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12513 }
12514 if (m_bVideoHdrEnabled)
12515 vhdr = CAM_VIDEO_HDR_MODE_ON;
12516
Thierry Strudel54dc9782017-02-15 12:12:10 -080012517 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12518
12519    if (vhdr != curr_hdr_state)
12520        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12521
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012522 rc = setVideoHdrMode(mParameters, vhdr);
12523 if (rc != NO_ERROR) {
12524        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012525 }
12526
12527 //IR
12528 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12529 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12530 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012531 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12532 uint8_t isIRon = 0;
12533
12534        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012535 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12536 LOGE("Invalid IR mode %d!", fwk_ir);
12537 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012538            if (isIRon != curr_ir_state)
12539                LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12540
Thierry Strudel04e026f2016-10-10 11:27:36 -070012541 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12542 CAM_INTF_META_IR_MODE, fwk_ir)) {
12543 rc = BAD_VALUE;
12544 }
12545 }
12546 }
12547
Thierry Strudel54dc9782017-02-15 12:12:10 -080012548 //Binning Correction Mode
12549 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12550 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12551 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12552 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12553 || (0 > fwk_binning_correction)) {
12554 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12555 } else {
12556 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12557 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12558 rc = BAD_VALUE;
12559 }
12560 }
12561 }
12562
Thierry Strudel269c81a2016-10-12 12:13:59 -070012563 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12564 float aec_speed;
12565 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12566 LOGD("AEC Speed :%f", aec_speed);
12567        if (aec_speed < 0) {
12568            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12569 } else {
12570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12571 aec_speed)) {
12572 rc = BAD_VALUE;
12573 }
12574 }
12575 }
12576
12577 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12578 float awb_speed;
12579 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12580 LOGD("AWB Speed :%f", awb_speed);
12581        if (awb_speed < 0) {
12582            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12583 } else {
12584 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12585 awb_speed)) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589 }
12590
Thierry Strudel3d639192016-09-09 11:52:26 -070012591 // TNR
12592 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12593 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12594 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012595 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012596 cam_denoise_param_t tnr;
12597 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12598 tnr.process_plates =
12599 (cam_denoise_process_type_t)frame_settings.find(
12600 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12601 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012602
12603        if (b_TnrRequested != curr_tnr_state)
12604            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12605
Thierry Strudel3d639192016-09-09 11:52:26 -070012606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12607 rc = BAD_VALUE;
12608 }
12609 }
12610
Thierry Strudel54dc9782017-02-15 12:12:10 -080012611 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012612 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012613 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012614 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12615 *exposure_metering_mode)) {
12616 rc = BAD_VALUE;
12617 }
12618 }
12619
Thierry Strudel3d639192016-09-09 11:52:26 -070012620 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12621 int32_t fwk_testPatternMode =
12622 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12623 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12624 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12625
12626 if (NAME_NOT_FOUND != testPatternMode) {
12627 cam_test_pattern_data_t testPatternData;
12628 memset(&testPatternData, 0, sizeof(testPatternData));
12629 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12630 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12631 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12632 int32_t *fwk_testPatternData =
12633 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12634 testPatternData.r = fwk_testPatternData[0];
12635 testPatternData.b = fwk_testPatternData[3];
12636 switch (gCamCapability[mCameraId]->color_arrangement) {
12637 case CAM_FILTER_ARRANGEMENT_RGGB:
12638 case CAM_FILTER_ARRANGEMENT_GRBG:
12639 testPatternData.gr = fwk_testPatternData[1];
12640 testPatternData.gb = fwk_testPatternData[2];
12641 break;
12642 case CAM_FILTER_ARRANGEMENT_GBRG:
12643 case CAM_FILTER_ARRANGEMENT_BGGR:
12644 testPatternData.gr = fwk_testPatternData[2];
12645 testPatternData.gb = fwk_testPatternData[1];
12646 break;
12647 default:
12648 LOGE("color arrangement %d is not supported",
12649 gCamCapability[mCameraId]->color_arrangement);
12650 break;
12651 }
12652 }
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12654 testPatternData)) {
12655 rc = BAD_VALUE;
12656 }
12657 } else {
12658 LOGE("Invalid framework sensor test pattern mode %d",
12659 fwk_testPatternMode);
12660 }
12661 }
12662
12663 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12664 size_t count = 0;
12665 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12666 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12667 gps_coords.data.d, gps_coords.count, count);
12668 if (gps_coords.count != count) {
12669 rc = BAD_VALUE;
12670 }
12671 }
12672
12673 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12674 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12675 size_t count = 0;
12676 const char *gps_methods_src = (const char *)
12677 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12678 memset(gps_methods, '\0', sizeof(gps_methods));
12679 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12680 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12681 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12682 if (GPS_PROCESSING_METHOD_SIZE != count) {
12683 rc = BAD_VALUE;
12684 }
12685 }
12686
12687 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12688 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12690 gps_timestamp)) {
12691 rc = BAD_VALUE;
12692 }
12693 }
12694
12695 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12696 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12697 cam_rotation_info_t rotation_info;
12698 if (orientation == 0) {
12699 rotation_info.rotation = ROTATE_0;
12700 } else if (orientation == 90) {
12701 rotation_info.rotation = ROTATE_90;
12702 } else if (orientation == 180) {
12703 rotation_info.rotation = ROTATE_180;
12704 } else if (orientation == 270) {
12705 rotation_info.rotation = ROTATE_270;
12706 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012707 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012708 rotation_info.streamId = snapshotStreamId;
12709 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12711 rc = BAD_VALUE;
12712 }
12713 }
12714
12715 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12716 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12718 rc = BAD_VALUE;
12719 }
12720 }
12721
12722 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12723 uint32_t thumb_quality = (uint32_t)
12724 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12726 thumb_quality)) {
12727 rc = BAD_VALUE;
12728 }
12729 }
12730
12731 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12732 cam_dimension_t dim;
12733 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12734 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12736 rc = BAD_VALUE;
12737 }
12738 }
12739
12740 // Internal metadata
12741 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12742 size_t count = 0;
12743 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12744 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12745 privatedata.data.i32, privatedata.count, count);
12746 if (privatedata.count != count) {
12747 rc = BAD_VALUE;
12748 }
12749 }
12750
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012751 // ISO/Exposure Priority
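    /* Illustrative usage (values are hypothetical): an app that wants a fixed ISO of 800
     * while AE keeps controlling exposure time would set QCAMERA3_SELECT_PRIORITY to
     * CAM_ISO_PRIORITY and QCAMERA3_USE_ISO_EXP_PRIORITY to 800; with CAM_EXP_PRIORITY the
     * 64-bit value is applied as the exposure time instead. */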
12752 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12753 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12754 cam_priority_mode_t mode =
12755 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12756 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12757 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12758 use_iso_exp_pty.previewOnly = FALSE;
12759 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12760 use_iso_exp_pty.value = *ptr;
12761
12762 if(CAM_ISO_PRIORITY == mode) {
12763 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12764 use_iso_exp_pty)) {
12765 rc = BAD_VALUE;
12766 }
12767 }
12768 else {
12769 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12770 use_iso_exp_pty)) {
12771 rc = BAD_VALUE;
12772 }
12773 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012774
12775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779 } else {
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12781 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012782 }
12783 }
12784
12785 // Saturation
12786 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12787 int32_t* use_saturation =
12788 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12790 rc = BAD_VALUE;
12791 }
12792 }
12793
Thierry Strudel3d639192016-09-09 11:52:26 -070012794 // EV step
12795 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12796 gCamCapability[mCameraId]->exp_compensation_step)) {
12797 rc = BAD_VALUE;
12798 }
12799
12800 // CDS info
12801 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12802 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12803 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12804
12805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12806 CAM_INTF_META_CDS_DATA, *cdsData)) {
12807 rc = BAD_VALUE;
12808 }
12809 }
12810
Shuzhen Wang19463d72016-03-08 11:09:52 -080012811 // Hybrid AE
12812 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12813 uint8_t *hybrid_ae = (uint8_t *)
12814 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12815
12816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12817 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12818 rc = BAD_VALUE;
12819 }
12820 }
12821
Shuzhen Wang14415f52016-11-16 18:26:18 -080012822 // Histogram
12823 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12824 uint8_t histogramMode =
12825 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12827 histogramMode)) {
12828 rc = BAD_VALUE;
12829 }
12830 }
12831
12832 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12833 int32_t histogramBins =
12834 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12835 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12836 histogramBins)) {
12837 rc = BAD_VALUE;
12838 }
12839 }
12840
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012841 // Tracking AF
12842 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12843 uint8_t trackingAfTrigger =
12844 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12846 trackingAfTrigger)) {
12847 rc = BAD_VALUE;
12848 }
12849 }
12850
Thierry Strudel3d639192016-09-09 11:52:26 -070012851 return rc;
12852}
12853
12854/*===========================================================================
12855 * FUNCTION : captureResultCb
12856 *
12857 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12858 *
12859 * PARAMETERS :
12860 * @frame : frame information from mm-camera-interface
12861 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12862 * @userdata: userdata
12863 *
12864 * RETURN : NONE
12865 *==========================================================================*/
12866void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12867 camera3_stream_buffer_t *buffer,
12868 uint32_t frame_number, bool isInputBuffer, void *userdata)
12869{
12870 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12871 if (hw == NULL) {
12872 LOGE("Invalid hw %p", hw);
12873 return;
12874 }
12875
12876 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12877 return;
12878}
12879
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012880/*===========================================================================
12881 * FUNCTION : setBufferErrorStatus
12882 *
12883 * DESCRIPTION: Callback handler for channels to report any buffer errors
12884 *
12885 * PARAMETERS :
12886 * @ch : Channel on which buffer error is reported from
12887 * @frame_number : frame number on which buffer error is reported on
12888 * @buffer_status : buffer error status
12889 * @userdata: userdata
12890 *
12891 * RETURN : NONE
12892 *==========================================================================*/
12893void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12894 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12895{
12896 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12897 if (hw == NULL) {
12898 LOGE("Invalid hw %p", hw);
12899 return;
12900 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012901
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012902 hw->setBufferErrorStatus(ch, frame_number, err);
12903 return;
12904}
12905
12906void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12907 uint32_t frameNumber, camera3_buffer_status_t err)
12908{
12909 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12910 pthread_mutex_lock(&mMutex);
12911
12912 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12913 if (req.frame_number != frameNumber)
12914 continue;
12915 for (auto& k : req.mPendingBufferList) {
12916 if(k.stream->priv == ch) {
12917 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12918 }
12919 }
12920 }
12921
12922 pthread_mutex_unlock(&mMutex);
12923 return;
12924}
Thierry Strudel3d639192016-09-09 11:52:26 -070012925/*===========================================================================
12926 * FUNCTION : initialize
12927 *
12928 * DESCRIPTION: Pass framework callback pointers to HAL
12929 *
12930 * PARAMETERS :
12931 *
12932 *
12933 * RETURN : Success : 0
12934 * Failure: -ENODEV
12935 *==========================================================================*/
12936
12937int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12938 const camera3_callback_ops_t *callback_ops)
12939{
12940 LOGD("E");
12941 QCamera3HardwareInterface *hw =
12942 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12943 if (!hw) {
12944 LOGE("NULL camera device");
12945 return -ENODEV;
12946 }
12947
12948 int rc = hw->initialize(callback_ops);
12949 LOGD("X");
12950 return rc;
12951}
12952
12953/*===========================================================================
12954 * FUNCTION : configure_streams
12955 *
12956 * DESCRIPTION: Set up the stream configuration requested by the framework
12957 *
12958 * PARAMETERS :
12959 *
12960 *
12961 * RETURN : Success: 0
12962 * Failure: -EINVAL (if stream configuration is invalid)
12963 * -ENODEV (fatal error)
12964 *==========================================================================*/
12965
12966int QCamera3HardwareInterface::configure_streams(
12967 const struct camera3_device *device,
12968 camera3_stream_configuration_t *stream_list)
12969{
12970 LOGD("E");
12971 QCamera3HardwareInterface *hw =
12972 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12973 if (!hw) {
12974 LOGE("NULL camera device");
12975 return -ENODEV;
12976 }
12977 int rc = hw->configureStreams(stream_list);
12978 LOGD("X");
12979 return rc;
12980}
12981
12982/*===========================================================================
12983 * FUNCTION : construct_default_request_settings
12984 *
12985 * DESCRIPTION: Configure a settings buffer to meet the required use case
12986 *
12987 * PARAMETERS :
12988 *
12989 *
12990 * RETURN : Success: Return valid metadata
12991 * Failure: Return NULL
12992 *==========================================================================*/
12993const camera_metadata_t* QCamera3HardwareInterface::
12994 construct_default_request_settings(const struct camera3_device *device,
12995 int type)
12996{
12997
12998 LOGD("E");
12999 camera_metadata_t* fwk_metadata = NULL;
13000 QCamera3HardwareInterface *hw =
13001 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13002 if (!hw) {
13003 LOGE("NULL camera device");
13004 return NULL;
13005 }
13006
13007 fwk_metadata = hw->translateCapabilityToMetadata(type);
13008
13009 LOGD("X");
13010 return fwk_metadata;
13011}
13012
13013/*===========================================================================
13014 * FUNCTION : process_capture_request
13015 *
13016 * DESCRIPTION: Orchestrate and queue a new capture request from the framework
13017 *
13018 * PARAMETERS :
13019 *   @device  : camera3 device handle
13020 *   @request : capture request to be processed
13021 * RETURN : 0 on success, negative error code on failure (-EINVAL for an invalid device)
13022 *==========================================================================*/
13023int QCamera3HardwareInterface::process_capture_request(
13024 const struct camera3_device *device,
13025 camera3_capture_request_t *request)
13026{
13027 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013028 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013029 QCamera3HardwareInterface *hw =
13030 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13031 if (!hw) {
13032 LOGE("NULL camera device");
13033 return -EINVAL;
13034 }
13035
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013036 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013037 LOGD("X");
13038 return rc;
13039}
13040
13041/*===========================================================================
13042 * FUNCTION : dump
13043 *
13044 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
13045 *
13046 * PARAMETERS :
13047 *   @device : camera3 device handle
13048 *   @fd     : file descriptor to dump into
13049 * RETURN : None
13050 *==========================================================================*/
13051
13052void QCamera3HardwareInterface::dump(
13053 const struct camera3_device *device, int fd)
13054{
13055 /* Log level property is read when "adb shell dumpsys media.camera" is
13056 called so that the log level can be controlled without restarting
13057 the media server */
13058 getLogLevel();
13059
13060 LOGD("E");
13061 QCamera3HardwareInterface *hw =
13062 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13063 if (!hw) {
13064 LOGE("NULL camera device");
13065 return;
13066 }
13067
13068 hw->dump(fd);
13069 LOGD("X");
13070 return;
13071}
13072
13073/*===========================================================================
13074 * FUNCTION : flush
13075 *
13076 * DESCRIPTION: Flush all in-flight captures, returning pending buffers and
13077 *              results with errors, and restart the channels
13078 *
13079 * PARAMETERS :
13080 *   @device : camera3 device handle
13081 * RETURN : 0 on success, -EINVAL (invalid device), -ENODEV (fatal error)
13082 *==========================================================================*/
13083
13084int QCamera3HardwareInterface::flush(
13085 const struct camera3_device *device)
13086{
13087 int rc;
13088 LOGD("E");
13089 QCamera3HardwareInterface *hw =
13090 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13091 if (!hw) {
13092 LOGE("NULL camera device");
13093 return -EINVAL;
13094 }
13095
13096 pthread_mutex_lock(&hw->mMutex);
13097 // Validate current state
13098 switch (hw->mState) {
13099 case STARTED:
13100 /* valid state */
13101 break;
13102
13103 case ERROR:
13104 pthread_mutex_unlock(&hw->mMutex);
13105 hw->handleCameraDeviceError();
13106 return -ENODEV;
13107
13108 default:
13109 LOGI("Flush returned during state %d", hw->mState);
13110 pthread_mutex_unlock(&hw->mMutex);
13111 return 0;
13112 }
13113 pthread_mutex_unlock(&hw->mMutex);
13114
13115 rc = hw->flush(true /* restart channels */ );
13116 LOGD("X");
13117 return rc;
13118}
13119
13120/*===========================================================================
13121 * FUNCTION : close_camera_device
13122 *
13123 * DESCRIPTION: Close the camera device and release the HAL instance
13124 *
13125 * PARAMETERS :
13126 *   @device : hw device handle of the camera to be closed
13127 *
13128 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid device
13129 *==========================================================================*/
13130int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13131{
13132 int ret = NO_ERROR;
13133 QCamera3HardwareInterface *hw =
13134 reinterpret_cast<QCamera3HardwareInterface *>(
13135 reinterpret_cast<camera3_device_t *>(device)->priv);
13136 if (!hw) {
13137 LOGE("NULL camera device");
13138 return BAD_VALUE;
13139 }
13140
13141 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13142 delete hw;
13143 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013144 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013145 return ret;
13146}
13147
13148/*===========================================================================
13149 * FUNCTION : getWaveletDenoiseProcessPlate
13150 *
13151 * DESCRIPTION: query wavelet denoise process plate
13152 *
13153 * PARAMETERS : None
13154 *
13155 * RETURN     : WNR process plate value
13156 *==========================================================================*/
13157cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13158{
13159 char prop[PROPERTY_VALUE_MAX];
13160 memset(prop, 0, sizeof(prop));
13161 property_get("persist.denoise.process.plates", prop, "0");
13162 int processPlate = atoi(prop);
13163 switch(processPlate) {
13164 case 0:
13165 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13166 case 1:
13167 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13168 case 2:
13169 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13170 case 3:
13171 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13172 default:
13173 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13174 }
13175}
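/*
 * Usage example (illustrative): the WNR plate is selected at runtime through
 * the persist.denoise.process.plates property read above, e.g.
 *
 *     adb shell setprop persist.denoise.process.plates 2
 *
 * selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. An unset property defaults to
 * "0" (CAM_WAVELET_DENOISE_YCBCR_PLANE); unrecognized values fall back to
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR. getTemporalDenoiseProcessPlate()
 * below follows the same mapping for persist.tnr.process.plates.
 */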
13176
13177
13178/*===========================================================================
13179 * FUNCTION : getTemporalDenoiseProcessPlate
13180 *
13181 * DESCRIPTION: query temporal denoise process plate
13182 *
13183 * PARAMETERS : None
13184 *
13185 * RETURN     : TNR process plate value
13186 *==========================================================================*/
13187cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13188{
13189 char prop[PROPERTY_VALUE_MAX];
13190 memset(prop, 0, sizeof(prop));
13191 property_get("persist.tnr.process.plates", prop, "0");
13192 int processPlate = atoi(prop);
13193 switch(processPlate) {
13194 case 0:
13195 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13196 case 1:
13197 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13198 case 2:
13199 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13200 case 3:
13201 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13202 default:
13203 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13204 }
13205}
13206
13207
13208/*===========================================================================
13209 * FUNCTION : extractSceneMode
13210 *
13211 * DESCRIPTION: Extract scene mode from the framework settings metadata
13212 *
13213 * PARAMETERS :
13214 *      @frame_settings: CameraMetadata reference
13215 *      @metaMode: ANDROID_CONTROL_MODE value set by the framework
13216 *      @hal_metadata: hal metadata structure
13217 *
13218 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE on failure)
13219 *==========================================================================*/
13220int32_t QCamera3HardwareInterface::extractSceneMode(
13221 const CameraMetadata &frame_settings, uint8_t metaMode,
13222 metadata_buffer_t *hal_metadata)
13223{
13224 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013225 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13226
13227 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13228 LOGD("Ignoring control mode OFF_KEEP_STATE");
13229 return NO_ERROR;
13230 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013231
13232 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13233 camera_metadata_ro_entry entry =
13234 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13235 if (0 == entry.count)
13236 return rc;
13237
13238 uint8_t fwk_sceneMode = entry.data.u8[0];
13239
13240 int val = lookupHalName(SCENE_MODES_MAP,
13241 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13242 fwk_sceneMode);
13243 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013244 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013245 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013246 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013247 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013248
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013249 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13250 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13251 }
13252
13253 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13254        if (sceneMode == CAM_SCENE_MODE_HDR) { // HAL enum space, matching the check above
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013255 cam_hdr_param_t hdr_params;
13256 hdr_params.hdr_enable = 1;
13257 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13258 hdr_params.hdr_need_1x = false;
13259 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13260 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13261 rc = BAD_VALUE;
13262 }
13263 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013264
Thierry Strudel3d639192016-09-09 11:52:26 -070013265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13266 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13267 rc = BAD_VALUE;
13268 }
13269 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013270
13271 if (mForceHdrSnapshot) {
13272 cam_hdr_param_t hdr_params;
13273 hdr_params.hdr_enable = 1;
13274 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13275 hdr_params.hdr_need_1x = false;
13276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13277 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13278 rc = BAD_VALUE;
13279 }
13280 }
13281
Thierry Strudel3d639192016-09-09 11:52:26 -070013282 return rc;
13283}
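/*
 * Illustrative sketch (framework-side settings, not part of this HAL): the
 * HDR scene branch above is exercised when the request metadata carries
 *
 *     CameraMetadata settings;
 *     uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *     uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *     settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *     settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *
 * extractSceneMode() then maps the framework scene mode to its
 * CAM_SCENE_MODE_* equivalent via SCENE_MODES_MAP and programs sensor HDR or
 * multi-frame HDR bracketing accordingly.
 */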
13284
13285/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013286 * FUNCTION : setVideoHdrMode
13287 *
13288 * DESCRIPTION: Set video HDR mode from the framework settings metadata
13289 *
13290 * PARAMETERS :
13291 *      @hal_metadata: hal metadata structure
13292 *      @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
13293 *
13294 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE on failure)
13295 *==========================================================================*/
13296int32_t QCamera3HardwareInterface::setVideoHdrMode(
13297 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13298{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013299 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13300 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13301 }
13302
13303 LOGE("Invalid Video HDR mode %d!", vhdr);
13304 return BAD_VALUE;
13305}
13306
13307/*===========================================================================
13308 * FUNCTION : setSensorHDR
13309 *
13310 * DESCRIPTION: Enable/disable sensor HDR.
13311 *
13312 * PARAMETERS :
13313 *      @enable: boolean whether to enable/disable sensor HDR
13314 *      @isVideoHdrEnable: true when called for video HDR, in which case
13315 *                         m_bSensorHDREnabled is left untouched
13316 *
13317 * RETURN : int32_t status (NO_ERROR on success, BAD_VALUE on failure)
13316 * RETURN : None
13317 *==========================================================================*/
13318int32_t QCamera3HardwareInterface::setSensorHDR(
13319 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13320{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013321 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013322 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13323
13324 if (enable) {
13325 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13326 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13327 #ifdef _LE_CAMERA_
13328 //Default to staggered HDR for IOT
13329 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13330 #else
13331 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13332 #endif
13333 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13334 }
13335
13336 bool isSupported = false;
13337 switch (sensor_hdr) {
13338 case CAM_SENSOR_HDR_IN_SENSOR:
13339 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13340 CAM_QCOM_FEATURE_SENSOR_HDR) {
13341 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013342 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013343 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013344 break;
13345 case CAM_SENSOR_HDR_ZIGZAG:
13346 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13347 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13348 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013349 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013350 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013351 break;
13352 case CAM_SENSOR_HDR_STAGGERED:
13353 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13354 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13355 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013356 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013357 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013358 break;
13359 case CAM_SENSOR_HDR_OFF:
13360 isSupported = true;
13361 LOGD("Turning off sensor HDR");
13362 break;
13363 default:
13364 LOGE("HDR mode %d not supported", sensor_hdr);
13365 rc = BAD_VALUE;
13366 break;
13367 }
13368
13369 if(isSupported) {
13370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13371 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13372 rc = BAD_VALUE;
13373 } else {
13374 if(!isVideoHdrEnable)
13375 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013376 }
13377 }
13378 return rc;
13379}
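/*
 * Usage example (illustrative): when sensor HDR is enabled, the concrete HDR
 * type comes from the persist.camera.sensor.hdr property read above,
 * interpreted as a cam_sensor_hdr_type_t value, e.g.
 *
 *     adb shell setprop persist.camera.sensor.hdr 3
 *
 * which is also the _LE_CAMERA_ build default and selects staggered HDR. The
 * chosen type is only applied when the matching CAM_QCOM_FEATURE_* bit is
 * advertised in the camera's qcom_supported_feature_mask.
 */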
13380
13381/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013382 * FUNCTION : needRotationReprocess
13383 *
13384 * DESCRIPTION: Check whether rotation needs to be handled by reprocess in post-processing
13385 *
13386 * PARAMETERS : none
13387 *
13388 * RETURN : true: needed
13389 * false: no need
13390 *==========================================================================*/
13391bool QCamera3HardwareInterface::needRotationReprocess()
13392{
13393 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13394 // current rotation is not zero, and pp has the capability to process rotation
13395 LOGH("need do reprocess for rotation");
13396 return true;
13397 }
13398
13399 return false;
13400}
13401
13402/*===========================================================================
13403 * FUNCTION : needReprocess
13404 *
13405 * DESCRIPTION: Check whether reprocess is needed
13406 *
13407 * PARAMETERS : @postprocess_mask: post-processing features already applied to the stream
13408 *
13409 * RETURN : true: needed
13410 * false: no need
13411 *==========================================================================*/
13412bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13413{
13414 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13415 // TODO: add for ZSL HDR later
13416 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13417 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13418 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13419 return true;
13420 } else {
13421 LOGH("already post processed frame");
13422 return false;
13423 }
13424 }
13425 return needRotationReprocess();
13426}
13427
13428/*===========================================================================
13429 * FUNCTION : needJpegExifRotation
13430 *
13431 * DESCRIPTION: Check whether rotation needs to be handled via JPEG EXIF
13432 *
13433 * PARAMETERS : none
13434 *
13435 * RETURN : true: needed
13436 * false: no need
13437 *==========================================================================*/
13438bool QCamera3HardwareInterface::needJpegExifRotation()
13439{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013440 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013441 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13442 LOGD("Need use Jpeg EXIF Rotation");
13443 return true;
13444 }
13445 return false;
13446}
13447
13448/*===========================================================================
13449 * FUNCTION : addOfflineReprocChannel
13450 *
13451 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13452 * coming from input channel
13453 *
13454 * PARAMETERS :
13455 * @config : reprocess configuration
13456 * @inputChHandle : pointer to the input (source) channel
13457 *
13458 *
13459 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13460 *==========================================================================*/
13461QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13462 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13463{
13464 int32_t rc = NO_ERROR;
13465 QCamera3ReprocessChannel *pChannel = NULL;
13466
13467 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013468 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13469 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013470 if (NULL == pChannel) {
13471 LOGE("no mem for reprocess channel");
13472 return NULL;
13473 }
13474
13475 rc = pChannel->initialize(IS_TYPE_NONE);
13476 if (rc != NO_ERROR) {
13477 LOGE("init reprocess channel failed, ret = %d", rc);
13478 delete pChannel;
13479 return NULL;
13480 }
13481
13482 // pp feature config
13483 cam_pp_feature_config_t pp_config;
13484 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13485
13486 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13487 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13488 & CAM_QCOM_FEATURE_DSDN) {
13489            // Prefer CPP-based CDS (DSDN) when the hardware supports it.
13490 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13491 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13492 }
13493 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13494 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13495 }
13496
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013497 if (config.hdr_param.hdr_enable) {
13498 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13499 pp_config.hdr_param = config.hdr_param;
13500 }
13501
13502 if (mForceHdrSnapshot) {
13503 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13504 pp_config.hdr_param.hdr_enable = 1;
13505 pp_config.hdr_param.hdr_need_1x = 0;
13506 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13507 }
13508
Thierry Strudel3d639192016-09-09 11:52:26 -070013509 rc = pChannel->addReprocStreamsFromSource(pp_config,
13510 config,
13511 IS_TYPE_NONE,
13512 mMetadataChannel);
13513
13514 if (rc != NO_ERROR) {
13515 delete pChannel;
13516 return NULL;
13517 }
13518 return pChannel;
13519}
13520
13521/*===========================================================================
13522 * FUNCTION : getMobicatMask
13523 *
13524 * DESCRIPTION: returns mobicat mask
13525 *
13526 * PARAMETERS : none
13527 *
13528 * RETURN : mobicat mask
13529 *
13530 *==========================================================================*/
13531uint8_t QCamera3HardwareInterface::getMobicatMask()
13532{
13533 return m_MobicatMask;
13534}
13535
13536/*===========================================================================
13537 * FUNCTION : setMobicat
13538 *
13539 * DESCRIPTION: set Mobicat on/off.
13540 *
13541 * PARAMETERS :
13542 *   @params : None
13543 *
13544 * RETURN : int32_t type of status
13545 *              NO_ERROR  -- success
13546 *              non-zero failure code
13547 *==========================================================================*/
13548int32_t QCamera3HardwareInterface::setMobicat()
13549{
13550 char value [PROPERTY_VALUE_MAX];
13551 property_get("persist.camera.mobicat", value, "0");
13552 int32_t ret = NO_ERROR;
13553 uint8_t enableMobi = (uint8_t)atoi(value);
13554
13555 if (enableMobi) {
13556 tune_cmd_t tune_cmd;
13557 tune_cmd.type = SET_RELOAD_CHROMATIX;
13558 tune_cmd.module = MODULE_ALL;
13559 tune_cmd.value = TRUE;
13560 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13561 CAM_INTF_PARM_SET_VFE_COMMAND,
13562 tune_cmd);
13563
13564 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13565 CAM_INTF_PARM_SET_PP_COMMAND,
13566 tune_cmd);
13567 }
13568 m_MobicatMask = enableMobi;
13569
13570 return ret;
13571}
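/*
 * Usage example (illustrative): Mobicat metadata tagging is toggled through
 * the persist.camera.mobicat property read above, e.g.
 *
 *     adb shell setprop persist.camera.mobicat 1
 *
 * which queues SET_RELOAD_CHROMATIX tuning commands for both the VFE and PP
 * blocks and records the mask in m_MobicatMask (exposed via getMobicatMask()).
 */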
13572
13573/*===========================================================================
13574* FUNCTION : getLogLevel
13575*
13576* DESCRIPTION: Reads the log level property into a variable
13577*
13578* PARAMETERS :
13579* None
13580*
13581* RETURN :
13582* None
13583*==========================================================================*/
13584void QCamera3HardwareInterface::getLogLevel()
13585{
13586 char prop[PROPERTY_VALUE_MAX];
13587 uint32_t globalLogLevel = 0;
13588
13589 property_get("persist.camera.hal.debug", prop, "0");
13590 int val = atoi(prop);
13591 if (0 <= val) {
13592 gCamHal3LogLevel = (uint32_t)val;
13593 }
13594
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013595 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013596 gKpiDebugLevel = atoi(prop);
13597
13598 property_get("persist.camera.global.debug", prop, "0");
13599 val = atoi(prop);
13600 if (0 <= val) {
13601 globalLogLevel = (uint32_t)val;
13602 }
13603
13604 /* Highest log level among hal.logs and global.logs is selected */
13605 if (gCamHal3LogLevel < globalLogLevel)
13606 gCamHal3LogLevel = globalLogLevel;
13607
13608 return;
13609}
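/*
 * Usage example (illustrative): because getLogLevel() is re-read from dump(),
 * HAL log verbosity can be changed at runtime without restarting the media
 * server, e.g.
 *
 *     adb shell setprop persist.camera.hal.debug 4
 *     adb shell dumpsys media.camera
 *
 * The effective level is the higher of persist.camera.hal.debug and
 * persist.camera.global.debug.
 */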
13610
13611/*===========================================================================
13612 * FUNCTION : validateStreamRotations
13613 *
13614 * DESCRIPTION: Check if the rotations requested are supported
13615 *
13616 * PARAMETERS :
13617 * @stream_list : streams to be configured
13618 *
13619 * RETURN : NO_ERROR on success
13620 * -EINVAL on failure
13621 *
13622 *==========================================================================*/
13623int QCamera3HardwareInterface::validateStreamRotations(
13624 camera3_stream_configuration_t *streamList)
13625{
13626 int rc = NO_ERROR;
13627
13628 /*
13629 * Loop through all streams requested in configuration
13630 * Check if unsupported rotations have been requested on any of them
13631 */
13632 for (size_t j = 0; j < streamList->num_streams; j++){
13633 camera3_stream_t *newStream = streamList->streams[j];
13634
13635 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13636 bool isImplDef = (newStream->format ==
13637 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13638 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13639 isImplDef);
13640
13641 if (isRotated && (!isImplDef || isZsl)) {
13642            LOGE("Error: Unsupported rotation of %d requested for stream "
13643 "type:%d and stream format:%d",
13644 newStream->rotation, newStream->stream_type,
13645 newStream->format);
13646 rc = -EINVAL;
13647 break;
13648 }
13649 }
13650
13651 return rc;
13652}
13653
13654/*===========================================================================
13655* FUNCTION : getFlashInfo
13656*
13657* DESCRIPTION: Retrieve information about whether the device has a flash.
13658*
13659* PARAMETERS :
13660* @cameraId : Camera id to query
13661* @hasFlash : Boolean indicating whether there is a flash device
13662* associated with given camera
13663* @flashNode : If a flash device exists, this will be its device node.
13664*
13665* RETURN :
13666* None
13667*==========================================================================*/
13668void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13669 bool& hasFlash,
13670 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13671{
13672 cam_capability_t* camCapability = gCamCapability[cameraId];
13673 if (NULL == camCapability) {
13674 hasFlash = false;
13675 flashNode[0] = '\0';
13676 } else {
13677 hasFlash = camCapability->flash_available;
13678 strlcpy(flashNode,
13679 (char*)camCapability->flash_dev_name,
13680 QCAMERA_MAX_FILEPATH_LENGTH);
13681 }
13682}
13683
13684/*===========================================================================
13685* FUNCTION : getEepromVersionInfo
13686*
13687* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13688*
13689* PARAMETERS : None
13690*
13691* RETURN : string describing EEPROM version
13692* "\0" if no such info available
13693*==========================================================================*/
13694const char *QCamera3HardwareInterface::getEepromVersionInfo()
13695{
13696 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13697}
13698
13699/*===========================================================================
13700* FUNCTION : getLdafCalib
13701*
13702* DESCRIPTION: Retrieve Laser AF calibration data
13703*
13704* PARAMETERS : None
13705*
13706* RETURN : Two uint32_t describing laser AF calibration data
13707* NULL if none is available.
13708*==========================================================================*/
13709const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13710{
13711 if (mLdafCalibExist) {
13712 return &mLdafCalib[0];
13713 } else {
13714 return NULL;
13715 }
13716}
13717
13718/*===========================================================================
13719 * FUNCTION : dynamicUpdateMetaStreamInfo
13720 *
13721 * DESCRIPTION: This function:
13722 * (1) stops all the channels
13723 * (2) returns error on pending requests and buffers
13724 * (3) sends metastream_info in setparams
13725 * (4) starts all channels
13726 * This is useful when sensor has to be restarted to apply any
13727 * settings such as frame rate from a different sensor mode
13728 *
13729 * PARAMETERS : None
13730 *
13731 * RETURN : NO_ERROR on success
13732 * Error codes on failure
13733 *
13734 *==========================================================================*/
13735int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13736{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013737 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013738 int rc = NO_ERROR;
13739
13740 LOGD("E");
13741
13742 rc = stopAllChannels();
13743 if (rc < 0) {
13744 LOGE("stopAllChannels failed");
13745 return rc;
13746 }
13747
13748 rc = notifyErrorForPendingRequests();
13749 if (rc < 0) {
13750 LOGE("notifyErrorForPendingRequests failed");
13751 return rc;
13752 }
13753
13754 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13755        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx"
13756                " Format:%d",
13757 mStreamConfigInfo.type[i],
13758 mStreamConfigInfo.stream_sizes[i].width,
13759 mStreamConfigInfo.stream_sizes[i].height,
13760 mStreamConfigInfo.postprocess_mask[i],
13761 mStreamConfigInfo.format[i]);
13762 }
13763
13764 /* Send meta stream info once again so that ISP can start */
13765 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13766 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13767 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13768 mParameters);
13769 if (rc < 0) {
13770 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13771 }
13772
13773 rc = startAllChannels();
13774 if (rc < 0) {
13775 LOGE("startAllChannels failed");
13776 return rc;
13777 }
13778
13779 LOGD("X");
13780 return rc;
13781}
13782
13783/*===========================================================================
13784 * FUNCTION : stopAllChannels
13785 *
13786 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13787 *
13788 * PARAMETERS : None
13789 *
13790 * RETURN : NO_ERROR on success
13791 * Error codes on failure
13792 *
13793 *==========================================================================*/
13794int32_t QCamera3HardwareInterface::stopAllChannels()
13795{
13796 int32_t rc = NO_ERROR;
13797
13798 LOGD("Stopping all channels");
13799 // Stop the Streams/Channels
13800 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13801 it != mStreamInfo.end(); it++) {
13802 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13803 if (channel) {
13804 channel->stop();
13805 }
13806 (*it)->status = INVALID;
13807 }
13808
13809 if (mSupportChannel) {
13810 mSupportChannel->stop();
13811 }
13812 if (mAnalysisChannel) {
13813 mAnalysisChannel->stop();
13814 }
13815 if (mRawDumpChannel) {
13816 mRawDumpChannel->stop();
13817 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013818 if (mHdrPlusRawSrcChannel) {
13819 mHdrPlusRawSrcChannel->stop();
13820 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013821 if (mMetadataChannel) {
13822 /* If content of mStreamInfo is not 0, there is metadata stream */
13823 mMetadataChannel->stop();
13824 }
13825
13826 LOGD("All channels stopped");
13827 return rc;
13828}
13829
13830/*===========================================================================
13831 * FUNCTION : startAllChannels
13832 *
13833 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13834 *
13835 * PARAMETERS : None
13836 *
13837 * RETURN : NO_ERROR on success
13838 * Error codes on failure
13839 *
13840 *==========================================================================*/
13841int32_t QCamera3HardwareInterface::startAllChannels()
13842{
13843 int32_t rc = NO_ERROR;
13844
13845 LOGD("Start all channels ");
13846 // Start the Streams/Channels
13847 if (mMetadataChannel) {
13848 /* If content of mStreamInfo is not 0, there is metadata stream */
13849 rc = mMetadataChannel->start();
13850 if (rc < 0) {
13851 LOGE("META channel start failed");
13852 return rc;
13853 }
13854 }
13855 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13856 it != mStreamInfo.end(); it++) {
13857 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13858 if (channel) {
13859 rc = channel->start();
13860 if (rc < 0) {
13861 LOGE("channel start failed");
13862 return rc;
13863 }
13864 }
13865 }
13866 if (mAnalysisChannel) {
13867 mAnalysisChannel->start();
13868 }
13869 if (mSupportChannel) {
13870 rc = mSupportChannel->start();
13871 if (rc < 0) {
13872 LOGE("Support channel start failed");
13873 return rc;
13874 }
13875 }
13876 if (mRawDumpChannel) {
13877 rc = mRawDumpChannel->start();
13878 if (rc < 0) {
13879 LOGE("RAW dump channel start failed");
13880 return rc;
13881 }
13882 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013883 if (mHdrPlusRawSrcChannel) {
13884 rc = mHdrPlusRawSrcChannel->start();
13885 if (rc < 0) {
13886 LOGE("HDR+ RAW channel start failed");
13887 return rc;
13888 }
13889 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013890
13891 LOGD("All channels started");
13892 return rc;
13893}
13894
13895/*===========================================================================
13896 * FUNCTION : notifyErrorForPendingRequests
13897 *
13898 * DESCRIPTION: This function sends error for all the pending requests/buffers
13899 *
13900 * PARAMETERS : None
13901 *
13902 * RETURN : Error codes
13903 * NO_ERROR on success
13904 *
13905 *==========================================================================*/
13906int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13907{
Emilian Peev7650c122017-01-19 08:24:33 -080013908 notifyErrorFoPendingDepthData(mDepthChannel);
13909
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013910 auto pendingRequest = mPendingRequestsList.begin();
13911 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013912
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013913 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13914 // buffers (for which buffers aren't sent yet).
13915 while (pendingRequest != mPendingRequestsList.end() ||
13916 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13917 if (pendingRequest == mPendingRequestsList.end() ||
13918 pendingBuffer->frame_number < pendingRequest->frame_number) {
13919            // If metadata for this frame was sent, notify about a buffer error and return buffers
13920 // with error.
13921 for (auto &info : pendingBuffer->mPendingBufferList) {
13922 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013923 camera3_notify_msg_t notify_msg;
13924 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13925 notify_msg.type = CAMERA3_MSG_ERROR;
13926 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013927 notify_msg.message.error.error_stream = info.stream;
13928 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013929 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013930
13931 camera3_stream_buffer_t buffer = {};
13932 buffer.acquire_fence = -1;
13933 buffer.release_fence = -1;
13934 buffer.buffer = info.buffer;
13935 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13936 buffer.stream = info.stream;
13937 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013938 }
13939
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013940 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13941 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13942 pendingBuffer->frame_number > pendingRequest->frame_number) {
13943 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013944 camera3_notify_msg_t notify_msg;
13945 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13946 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013947 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13948 notify_msg.message.error.error_stream = nullptr;
13949 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013950 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013951
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013952 if (pendingRequest->input_buffer != nullptr) {
13953 camera3_capture_result result = {};
13954 result.frame_number = pendingRequest->frame_number;
13955 result.result = nullptr;
13956 result.input_buffer = pendingRequest->input_buffer;
13957 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013958 }
13959
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013960 mShutterDispatcher.clear(pendingRequest->frame_number);
13961 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13962 } else {
13963 // If both buffers and result metadata weren't sent yet, notify about a request error
13964 // and return buffers with error.
13965 for (auto &info : pendingBuffer->mPendingBufferList) {
13966 camera3_notify_msg_t notify_msg;
13967 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13968 notify_msg.type = CAMERA3_MSG_ERROR;
13969 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13970 notify_msg.message.error.error_stream = info.stream;
13971 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13972 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013973
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013974 camera3_stream_buffer_t buffer = {};
13975 buffer.acquire_fence = -1;
13976 buffer.release_fence = -1;
13977 buffer.buffer = info.buffer;
13978 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13979 buffer.stream = info.stream;
13980 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13981 }
13982
13983 if (pendingRequest->input_buffer != nullptr) {
13984 camera3_capture_result result = {};
13985 result.frame_number = pendingRequest->frame_number;
13986 result.result = nullptr;
13987 result.input_buffer = pendingRequest->input_buffer;
13988 orchestrateResult(&result);
13989 }
13990
13991 mShutterDispatcher.clear(pendingRequest->frame_number);
13992 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13993 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013994 }
13995 }
13996
13997 /* Reset pending frame Drop list and requests list */
13998 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013999 mShutterDispatcher.clear();
14000 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014001 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014002 LOGH("Cleared all the pending buffers ");
14003
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014004 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014005}
14006
14007bool QCamera3HardwareInterface::isOnEncoder(
14008 const cam_dimension_t max_viewfinder_size,
14009 uint32_t width, uint32_t height)
14010{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014011 return ((width > (uint32_t)max_viewfinder_size.width) ||
14012 (height > (uint32_t)max_viewfinder_size.height) ||
14013 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14014 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014015}
14016
14017/*===========================================================================
14018 * FUNCTION : setBundleInfo
14019 *
14020 * DESCRIPTION: Set bundle info for all streams that are bundle.
14021 *
14022 * PARAMETERS : None
14023 *
14024 * RETURN : NO_ERROR on success
14025 * Error codes on failure
14026 *==========================================================================*/
14027int32_t QCamera3HardwareInterface::setBundleInfo()
14028{
14029 int32_t rc = NO_ERROR;
14030
14031 if (mChannelHandle) {
14032 cam_bundle_config_t bundleInfo;
14033 memset(&bundleInfo, 0, sizeof(bundleInfo));
14034 rc = mCameraHandle->ops->get_bundle_info(
14035 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14036 if (rc != NO_ERROR) {
14037 LOGE("get_bundle_info failed");
14038 return rc;
14039 }
14040 if (mAnalysisChannel) {
14041 mAnalysisChannel->setBundleInfo(bundleInfo);
14042 }
14043 if (mSupportChannel) {
14044 mSupportChannel->setBundleInfo(bundleInfo);
14045 }
14046 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14047 it != mStreamInfo.end(); it++) {
14048 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14049 channel->setBundleInfo(bundleInfo);
14050 }
14051 if (mRawDumpChannel) {
14052 mRawDumpChannel->setBundleInfo(bundleInfo);
14053 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014054 if (mHdrPlusRawSrcChannel) {
14055 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14056 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014057 }
14058
14059 return rc;
14060}
14061
14062/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014063 * FUNCTION : setInstantAEC
14064 *
14065 * DESCRIPTION: Set Instant AEC related params.
14066 *
14067 * PARAMETERS :
14068 * @meta: CameraMetadata reference
14069 *
14070 * RETURN : NO_ERROR on success
14071 * Error codes on failure
14072 *==========================================================================*/
14073int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14074{
14075 int32_t rc = NO_ERROR;
14076 uint8_t val = 0;
14077 char prop[PROPERTY_VALUE_MAX];
14078
14079 // First try to configure instant AEC from framework metadata
14080 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14081 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14082 }
14083
14084 // If framework did not set this value, try to read from set prop.
14085 if (val == 0) {
14086 memset(prop, 0, sizeof(prop));
14087 property_get("persist.camera.instant.aec", prop, "0");
14088 val = (uint8_t)atoi(prop);
14089 }
14090
14091 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14092 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14093 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14094 mInstantAEC = val;
14095 mInstantAECSettledFrameNumber = 0;
14096 mInstantAecFrameIdxCount = 0;
14097 LOGH("instantAEC value set %d",val);
14098 if (mInstantAEC) {
14099 memset(prop, 0, sizeof(prop));
14100 property_get("persist.camera.ae.instant.bound", prop, "10");
14101 int32_t aec_frame_skip_cnt = atoi(prop);
14102 if (aec_frame_skip_cnt >= 0) {
14103 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14104 } else {
14105 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14106 rc = BAD_VALUE;
14107 }
14108 }
14109 } else {
14110 LOGE("Bad instant aec value set %d", val);
14111 rc = BAD_VALUE;
14112 }
14113 return rc;
14114}
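/*
 * Usage example (illustrative): instant AEC is requested either through the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag in the request metadata or, when the
 * framework leaves it at 0, through properties, e.g.
 *
 *     adb shell setprop persist.camera.instant.aec 1
 *     adb shell setprop persist.camera.ae.instant.bound 10
 *
 * The first supplies a cam_aec_convergence_type value (the numeric mapping
 * follows that enum); the second bounds how many frames are skipped for
 * display while AEC settles.
 */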
14115
14116/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014117 * FUNCTION : get_num_overall_buffers
14118 *
14119 * DESCRIPTION: Estimate number of pending buffers across all requests.
14120 *
14121 * PARAMETERS : None
14122 *
14123 * RETURN : Number of overall pending buffers
14124 *
14125 *==========================================================================*/
14126uint32_t PendingBuffersMap::get_num_overall_buffers()
14127{
14128 uint32_t sum_buffers = 0;
14129 for (auto &req : mPendingBuffersInRequest) {
14130 sum_buffers += req.mPendingBufferList.size();
14131 }
14132 return sum_buffers;
14133}
14134
14135/*===========================================================================
14136 * FUNCTION : removeBuf
14137 *
14138 * DESCRIPTION: Remove a matching buffer from tracker.
14139 *
14140 * PARAMETERS : @buffer: image buffer for the callback
14141 *
14142 * RETURN : None
14143 *
14144 *==========================================================================*/
14145void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14146{
14147 bool buffer_found = false;
14148 for (auto req = mPendingBuffersInRequest.begin();
14149 req != mPendingBuffersInRequest.end(); req++) {
14150 for (auto k = req->mPendingBufferList.begin();
14151 k != req->mPendingBufferList.end(); k++ ) {
14152 if (k->buffer == buffer) {
14153 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14154 req->frame_number, buffer);
14155 k = req->mPendingBufferList.erase(k);
14156 if (req->mPendingBufferList.empty()) {
14157 // Remove this request from Map
14158 req = mPendingBuffersInRequest.erase(req);
14159 }
14160 buffer_found = true;
14161 break;
14162 }
14163 }
14164 if (buffer_found) {
14165 break;
14166 }
14167 }
14168 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14169 get_num_overall_buffers());
14170}
14171
14172/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014173 * FUNCTION : getBufErrStatus
14174 *
14175 * DESCRIPTION: get buffer error status
14176 *
14177 * PARAMETERS : @buffer: buffer handle
14178 *
14179 * RETURN : Error status
14180 *
14181 *==========================================================================*/
14182int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14183{
14184 for (auto& req : mPendingBuffersInRequest) {
14185 for (auto& k : req.mPendingBufferList) {
14186 if (k.buffer == buffer)
14187 return k.bufStatus;
14188 }
14189 }
14190 return CAMERA3_BUFFER_STATUS_OK;
14191}
14192
14193/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014194 * FUNCTION : setPAAFSupport
14195 *
14196 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14197 * feature mask according to stream type and filter
14198 * arrangement
14199 *
14200 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14201 * @stream_type: stream type
14202 * @filter_arrangement: filter arrangement
14203 *
14204 * RETURN : None
14205 *==========================================================================*/
14206void QCamera3HardwareInterface::setPAAFSupport(
14207 cam_feature_mask_t& feature_mask,
14208 cam_stream_type_t stream_type,
14209 cam_color_filter_arrangement_t filter_arrangement)
14210{
Thierry Strudel3d639192016-09-09 11:52:26 -070014211 switch (filter_arrangement) {
14212 case CAM_FILTER_ARRANGEMENT_RGGB:
14213 case CAM_FILTER_ARRANGEMENT_GRBG:
14214 case CAM_FILTER_ARRANGEMENT_GBRG:
14215 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014216 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14217 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014218 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014219 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14220 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014221 }
14222 break;
14223 case CAM_FILTER_ARRANGEMENT_Y:
14224 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14225 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14226 }
14227 break;
14228 default:
14229 break;
14230 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014231 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14232 feature_mask, stream_type, filter_arrangement);
14233
14234
Thierry Strudel3d639192016-09-09 11:52:26 -070014235}
14236
14237/*===========================================================================
14238* FUNCTION : getSensorMountAngle
14239*
14240* DESCRIPTION: Retrieve sensor mount angle
14241*
14242* PARAMETERS : None
14243*
14244* RETURN : sensor mount angle in uint32_t
14245*==========================================================================*/
14246uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14247{
14248 return gCamCapability[mCameraId]->sensor_mount_angle;
14249}
14250
14251/*===========================================================================
14252* FUNCTION : getRelatedCalibrationData
14253*
14254* DESCRIPTION: Retrieve related system calibration data
14255*
14256* PARAMETERS : None
14257*
14258* RETURN : Pointer of related system calibration data
14259*==========================================================================*/
14260const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14261{
14262 return (const cam_related_system_calibration_data_t *)
14263 &(gCamCapability[mCameraId]->related_cam_calibration);
14264}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014265
14266/*===========================================================================
14267 * FUNCTION : is60HzZone
14268 *
14269 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14270 *
14271 * PARAMETERS : None
14272 *
14273 * RETURN : True if in 60Hz zone, False otherwise
14274 *==========================================================================*/
14275bool QCamera3HardwareInterface::is60HzZone()
14276{
14277 time_t t = time(NULL);
14278 struct tm lt;
14279
14280 struct tm* r = localtime_r(&t, &lt);
14281
14282 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14283 return true;
14284 else
14285 return false;
14286}
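/*
 * Example (illustrative): the check above treats UTC offsets of -2h and below
 * or +8h and above as 60Hz territory, so a device at UTC-5 (e.g. the Americas)
 * reports true, while UTC+1 (most of Europe, 50Hz mains) reports false. If
 * localtime_r() fails, 60Hz is assumed.
 */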
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014287
14288/*===========================================================================
14289 * FUNCTION : adjustBlackLevelForCFA
14290 *
14291 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14292 * of bayer CFA (Color Filter Array).
14293 *
14294 * PARAMETERS : @input: black level pattern in the order of RGGB
14295 * @output: black level pattern in the order of CFA
14296 * @color_arrangement: CFA color arrangement
14297 *
14298 * RETURN : None
14299 *==========================================================================*/
14300template<typename T>
14301void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14302 T input[BLACK_LEVEL_PATTERN_CNT],
14303 T output[BLACK_LEVEL_PATTERN_CNT],
14304 cam_color_filter_arrangement_t color_arrangement)
14305{
14306 switch (color_arrangement) {
14307 case CAM_FILTER_ARRANGEMENT_GRBG:
14308 output[0] = input[1];
14309 output[1] = input[0];
14310 output[2] = input[3];
14311 output[3] = input[2];
14312 break;
14313 case CAM_FILTER_ARRANGEMENT_GBRG:
14314 output[0] = input[2];
14315 output[1] = input[3];
14316 output[2] = input[0];
14317 output[3] = input[1];
14318 break;
14319 case CAM_FILTER_ARRANGEMENT_BGGR:
14320 output[0] = input[3];
14321 output[1] = input[2];
14322 output[2] = input[1];
14323 output[3] = input[0];
14324 break;
14325 case CAM_FILTER_ARRANGEMENT_RGGB:
14326 output[0] = input[0];
14327 output[1] = input[1];
14328 output[2] = input[2];
14329 output[3] = input[3];
14330 break;
14331 default:
14332 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14333 break;
14334 }
14335}
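/*
 * Worked example (illustrative): with an RGGB-ordered input of {R, Gr, Gb, B}
 * and a CAM_FILTER_ARRANGEMENT_GRBG sensor, the mapping above produces
 * {Gr, R, B, Gb}, i.e. the black level entries are reordered to match the
 * sensor's CFA readout order, while RGGB sensors pass through unchanged.
 */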
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014336
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014337void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14338 CameraMetadata &resultMetadata,
14339 std::shared_ptr<metadata_buffer_t> settings)
14340{
14341 if (settings == nullptr) {
14342 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14343 return;
14344 }
14345
14346 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14347 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14348 }
14349
14350 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14351 String8 str((const char *)gps_methods);
14352 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14353 }
14354
14355 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14356 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14357 }
14358
14359 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14360 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14361 }
14362
14363 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14364 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14365 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14366 }
14367
14368 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14369 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14370 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14371 }
14372
14373 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14374 int32_t fwk_thumb_size[2];
14375 fwk_thumb_size[0] = thumb_size->width;
14376 fwk_thumb_size[1] = thumb_size->height;
14377 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14378 }
14379
14380 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14381 uint8_t fwk_intent = intent[0];
14382 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14383 }
14384}
14385
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014386bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14387 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14388 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014389{
14390 if (hdrPlusRequest == nullptr) return false;
14391
14392 // Check noise reduction mode is high quality.
14393 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14394 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14395 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014396 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14397 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014398 return false;
14399 }
14400
14401 // Check edge mode is high quality.
14402 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14403 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14404 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14405 return false;
14406 }
14407
14408 if (request.num_output_buffers != 1 ||
14409 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14410 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014411 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14412 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14413 request.output_buffers[0].stream->width,
14414 request.output_buffers[0].stream->height,
14415 request.output_buffers[0].stream->format);
14416 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014417 return false;
14418 }
14419
14420 // Get a YUV buffer from pic channel.
14421 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14422 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14423 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14424 if (res != OK) {
14425 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14426 __FUNCTION__, strerror(-res), res);
14427 return false;
14428 }
14429
14430 pbcamera::StreamBuffer buffer;
14431 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014432 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014433 buffer.data = yuvBuffer->buffer;
14434 buffer.dataSize = yuvBuffer->frame_len;
14435
14436 pbcamera::CaptureRequest pbRequest;
14437 pbRequest.id = request.frame_number;
14438 pbRequest.outputBuffers.push_back(buffer);
14439
14440 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014441 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014442 if (res != OK) {
14443 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14444 strerror(-res), res);
14445 return false;
14446 }
14447
14448 hdrPlusRequest->yuvBuffer = yuvBuffer;
14449 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14450
14451 return true;
14452}
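/*
 * Illustrative sketch (framework-side request, not part of this HAL): a
 * capture request qualifies for the HDR+ path above only when it carries
 *
 *     uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *     uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *     settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *     settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *
 * and exactly one output buffer on a HAL_PIXEL_FORMAT_BLOB (JPEG) stream, for
 * which a YUV buffer is borrowed from the pic channel and submitted to the
 * HDR+ service.
 */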
14453
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014454status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14455{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014456 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14457 return OK;
14458 }
14459
14460 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14461 if (res != OK) {
14462 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14463 strerror(-res), res);
14464 return res;
14465 }
14466 gHdrPlusClientOpening = true;
14467
14468 return OK;
14469}
14470
Chien-Yu Chenee335912017-02-09 17:53:20 -080014471status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14472{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014473 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014474
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014475 if (mHdrPlusModeEnabled) {
14476 return OK;
14477 }
14478
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014479 // Check if gHdrPlusClient is opened or being opened.
14480 if (gHdrPlusClient == nullptr) {
14481 if (gHdrPlusClientOpening) {
14482 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14483 return OK;
14484 }
14485
14486 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014487 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014488 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14489 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014490 return res;
14491 }
14492
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014493 // When opening HDR+ client completes, HDR+ mode will be enabled.
14494 return OK;
14495
Chien-Yu Chenee335912017-02-09 17:53:20 -080014496 }
14497
14498 // Configure stream for HDR+.
14499 res = configureHdrPlusStreamsLocked();
14500 if (res != OK) {
14501 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014502 return res;
14503 }
14504
14505 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14506 res = gHdrPlusClient->setZslHdrPlusMode(true);
14507 if (res != OK) {
14508 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014509 return res;
14510 }
14511
14512 mHdrPlusModeEnabled = true;
14513 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14514
14515 return OK;
14516}
14517
void QCamera3HardwareInterface::disableHdrPlusModeLocked()
{
    // Disable HDR+ mode.
    if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
        status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
        if (res != OK) {
            ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
        }

        // Close HDR+ client so Easel can enter low power mode.
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
    }

    mHdrPlusModeEnabled = false;
    gHdrPlusClientOpening = false;
    ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
}

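// Builds the HDR+ client stream configuration. The input is either RAW10 buffers provided by the
// HAL (when mHdrPlusRawSrcChannel exists) or the sensor MIPI stream described by the current
// sensor mode; the only output configured today is the YUV stream backed by mPictureChannel.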
status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
{
    pbcamera::InputConfiguration inputConfig;
    std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
    status_t res = OK;

    // Configure HDR+ client streams.
    // Get input config.
    if (mHdrPlusRawSrcChannel) {
        // HDR+ input buffers will be provided by HAL.
        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        inputConfig.isSensorInput = false;
    } else {
        // Sensor MIPI will send data to Easel.
        inputConfig.isSensorInput = true;
        inputConfig.sensorMode.cameraId = mCameraId;
        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
        if (mSensorModeInfo.num_raw_bits != 10) {
            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.",
                    __FUNCTION__, mSensorModeInfo.num_raw_bits);
            return BAD_VALUE;
        }

        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
    }

    // Get output configurations.
    // Easel may need to output RAW16 buffers if mRawChannel was created.
    // TODO: handle RAW16 outputs.

    // Easel may need to output YUV buffers if mPictureChannel was created.
    pbcamera::StreamConfiguration yuvOutputConfig;
    if (mPictureChannel != nullptr) {
        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
        if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }

        outputStreamConfigs.push_back(yuvOutputConfig);
    }

    // TODO: consider other channels for YUV output buffers.

    res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
    if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
                strerror(-res), res);
        return res;
    }

    return OK;
}

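// Callback for the asynchronous open requested via gEaselManagerClient.openHdrPlusClientAsync().
// Takes ownership of the client, pushes the per-camera static metadata to it, and then enables
// HDR+ mode. If HDR+ was turned off while the open was in flight (gHdrPlusClientOpening already
// cleared), the newly opened client is not kept.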
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
{
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    if (!gHdrPlusClientOpening) {
        ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
        return;
    }

    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

void QCamera3HardwareInterface::onOpenFailed(status_t err)
{
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

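// Callback for unrecoverable HDR+ client (Easel) errors: puts the HAL into the ERROR state and
// triggers the standard device error handling.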
void QCamera3HardwareInterface::onFatalError()
{
    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);

    // Set HAL state to error.
    pthread_mutex_lock(&mMutex);
    mState = ERROR;
    pthread_mutex_unlock(&mMutex);

    handleCameraDeviceError();
}

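// HDR+ client callback delivering a successful capture result. The result is expected to carry
// exactly one YUV output buffer. The flow below updates the result metadata with the original
// still-capture settings, optionally dumps the YUV image (persist.camera.hdrplus.dump_yuv),
// hands the buffer to the pic channel for JPEG encoding, marks the shutter ready, and finally
// returns the result metadata to the framework.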
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // Defensive check: avoid dereferencing an end iterator for an unknown request.
                ALOGE("%s: Couldn't find the pending HDR+ request for request id %d.",
                        __FUNCTION__, result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);

        if (dumpYuvOutput) {
            // Dump the YUV buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert the updated result metadata to HAL metadata and return the YUV buffer for JPEG
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Find the sensor timestamp and mark the shutter ready.
        camera_metadata_ro_entry_t entry;
        res = find_camera_metadata_ro_entry(updatedResultMetadata,
                ANDROID_SENSOR_TIMESTAMP, &entry);
        if (res != OK) {
            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
                    __FUNCTION__, result->requestId, strerror(-res), res);
        } else {
            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
        }

        // Send HDR+ metadata to the framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

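// HDR+ client callback for a failed capture request: returns the YUV buffer to the pic channel,
// reports CAMERA3_MSG_ERROR_BUFFER for every framework buffer pending on that frame number, and
// drops the corresponding pending request.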
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Remove the pending HDR+ request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (pendingRequest != mHdrPlusPendingRequests.end()) {
            // Return the buffer to pic channel.
            QCamera3PicChannel *picChannel = (QCamera3PicChannel*)
                    pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
            picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

            mHdrPlusPendingRequests.erase(pendingRequest);
        } else {
            // Defensive check: avoid dereferencing an end iterator for an unknown request.
            ALOGW("%s: Couldn't find the pending HDR+ request for request id %d.", __FUNCTION__,
                    failedResult->requestId);
        }
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out an error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out the result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

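// ShutterDispatcher keeps shutter notifications ordered by frame number even when timestamps
// become ready out of order (mShutters is keyed by frame number, and markShutterReady() only
// flushes from the lowest pending frame up to the first not-yet-ready one). A hypothetical
// sequence, for illustration only:
//
//   shutterDispatcher.expectShutter(10);
//   shutterDispatcher.expectShutter(11);
//   shutterDispatcher.markShutterReady(11, t11);  // nothing sent; frame 10 is still pending
//   shutterDispatcher.markShutterReady(10, t10);  // shutters for 10, then 11, are sent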
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.emplace(frameNumber, Shutter());
}

void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Make this frame's shutter ready.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        // Shutter was already sent.
        return;
    }

    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = mShutters.begin();
    while (shutter != mShutters.end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = mShutters.erase(shutter);
    }
}

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
}

void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }
    mShutters.clear();
}

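// OutputBufferDispatcher does the same for output buffers, per configured stream: a buffer that
// completes early is held until every earlier frame on that stream has a ready buffer, so
// results go back to the framework in frame-number order. A hypothetical usage sketch:
//
//   outputDispatcher.configureStreams(streamList);
//   outputDispatcher.expectBuffer(10, stream);
//   outputDispatcher.expectBuffer(11, stream);
//   outputDispatcher.markBufferReady(11, buf11);  // held back; frame 10 isn't ready yet
//   outputDispatcher.markBufferReady(10, buf10);  // results for 10, then 11, are sent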
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__,
                frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result for this buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; // end namespace qcamera