/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
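// For example, METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates to the number of
// entries in that lookup table defined below.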

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android values, the
 * lookup traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the first entry found is the one selected.
 */
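// For example, CAM_AWB_D50 appears below for the _D50, _DAYLIGHT and _FINE_WEATHER
// entries; because the _D50 entry comes first, a HAL value of CAM_AWB_D50 always maps
// back to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50.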
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
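// Per MIN_FPS_FOR_BATCH_MODE above, only the 120 fps and higher entries in this
// table are candidates for batch (HFR) mode; the 60 and 90 fps modes run unbatched.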

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
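// 0xDEADBEEF doubles as the invalid-session sentinel; closeCamera() resets a
// camera's entry back to it once the session ends.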

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
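// Example usage (see openCamera() below): logEaselEvent("EASEL_STARTUP_LATENCY",
// "Camera Open") stamps the camera-open point in boot-time milliseconds whenever
// Easel profiling is enabled.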

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD: check whether this hardcoding is needed; verify by printing whether mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}
606/*===========================================================================
607 * FUNCTION : ~QCamera3HardwareInterface
608 *
609 * DESCRIPTION: destructor of QCamera3HardwareInterface
610 *
611 * PARAMETERS : none
612 *
613 * RETURN : none
614 *==========================================================================*/
615QCamera3HardwareInterface::~QCamera3HardwareInterface()
616{
617 LOGD("E");
618
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800619 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700620
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800621 // Disable power hint and enable the perf lock for close camera
622 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
623 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
624
625 // unlink of dualcam during close camera
626 if (mIsDeviceLinked) {
627 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
628 &m_pDualCamCmdPtr->bundle_info;
629 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
630 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
631 pthread_mutex_lock(&gCamLock);
632
633 if (mIsMainCamera == 1) {
634 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
635 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
636 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
637 // related session id should be session id of linked session
638 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
639 } else {
640 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
641 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
642 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
643 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
644 }
Thierry Strudel2896d122017-02-23 19:18:03 -0800645 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800646 pthread_mutex_unlock(&gCamLock);
647
648 rc = mCameraHandle->ops->set_dual_cam_cmd(
649 mCameraHandle->camera_handle);
650 if (rc < 0) {
651 LOGE("Dualcam: Unlink failed, but still proceed to close");
652 }
653 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700654
655 /* We need to stop all streams before deleting any stream */
656 if (mRawDumpChannel) {
657 mRawDumpChannel->stop();
658 }
659
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700660 if (mHdrPlusRawSrcChannel) {
661 mHdrPlusRawSrcChannel->stop();
662 }
663
Thierry Strudel3d639192016-09-09 11:52:26 -0700664 // NOTE: 'camera3_stream_t *' objects are already freed at
665 // this stage by the framework
666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
667 it != mStreamInfo.end(); it++) {
668 QCamera3ProcessingChannel *channel = (*it)->channel;
669 if (channel) {
670 channel->stop();
671 }
672 }
673 if (mSupportChannel)
674 mSupportChannel->stop();
675
676 if (mAnalysisChannel) {
677 mAnalysisChannel->stop();
678 }
679 if (mMetadataChannel) {
680 mMetadataChannel->stop();
681 }
682 if (mChannelHandle) {
683 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
684 mChannelHandle);
685 LOGD("stopping channel %d", mChannelHandle);
686 }
687
688 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
689 it != mStreamInfo.end(); it++) {
690 QCamera3ProcessingChannel *channel = (*it)->channel;
691 if (channel)
692 delete channel;
693 free (*it);
694 }
695 if (mSupportChannel) {
696 delete mSupportChannel;
697 mSupportChannel = NULL;
698 }
699
700 if (mAnalysisChannel) {
701 delete mAnalysisChannel;
702 mAnalysisChannel = NULL;
703 }
704 if (mRawDumpChannel) {
705 delete mRawDumpChannel;
706 mRawDumpChannel = NULL;
707 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700708 if (mHdrPlusRawSrcChannel) {
709 delete mHdrPlusRawSrcChannel;
710 mHdrPlusRawSrcChannel = NULL;
711 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700712 if (mDummyBatchChannel) {
713 delete mDummyBatchChannel;
714 mDummyBatchChannel = NULL;
715 }
716
717 mPictureChannel = NULL;
Emilian Peev7650c122017-01-19 08:24:33 -0800718 mDepthChannel = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -0700719
720 if (mMetadataChannel) {
721 delete mMetadataChannel;
722 mMetadataChannel = NULL;
723 }
724
725 /* Clean up all channels */
726 if (mCameraInitialized) {
727 if(!mFirstConfiguration){
728 //send the last unconfigure
729 cam_stream_size_info_t stream_config_info;
730 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
731 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
732 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -0800733 m_bIs4KVideo ? 0 :
734 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700735 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700736 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
737 stream_config_info);
738 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
739 if (rc < 0) {
740 LOGE("set_parms failed for unconfigure");
741 }
742 }
743 deinitParameters();
744 }
745
746 if (mChannelHandle) {
747 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
748 mChannelHandle);
749 LOGH("deleting channel %d", mChannelHandle);
750 mChannelHandle = 0;
751 }
752
753 if (mState != CLOSED)
754 closeCamera();
755
756 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
757 req.mPendingBufferList.clear();
758 }
759 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700760 for (pendingRequestIterator i = mPendingRequestsList.begin();
761 i != mPendingRequestsList.end();) {
762 i = erasePendingRequest(i);
763 }
764 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
765 if (mDefaultMetadata[i])
766 free_camera_metadata(mDefaultMetadata[i]);
767
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700769
770 pthread_cond_destroy(&mRequestCond);
771
772 pthread_cond_destroy(&mBuffersCond);
773
774 pthread_mutex_destroy(&mMutex);
775 LOGD("X");
776}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists*
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, depth cloud blob width should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always the full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to the full
                 * array size, so keep the check lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        // Here we only care whether it's EIS3 or not
        char is_type_value[PROPERTY_VALUE_MAX];
        property_get("persist.camera.is_type", is_type_value, "4");
        cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
        if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
                mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
            isType = IS_TYPE_NONE;

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1388
1389/*===========================================================================
1390 * FUNCTION : validateUsageFlagsForEis
1391 *
1392 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1393 *
1394 * PARAMETERS :
1395 * @stream_list : streams to be configured
1396 *
1397 * RETURN :
1398 * NO_ERROR if the usage flags are supported
1399 * error code if usage flags are not supported
1400 *
1401 *==========================================================================*/
1402int QCamera3HardwareInterface::validateUsageFlagsForEis(
1403 const camera3_stream_configuration_t* streamList)
1404{
1405 for (size_t j = 0; j < streamList->num_streams; j++) {
1406 const camera3_stream_t *newStream = streamList->streams[j];
1407
1408 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1409 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1410
1411 // Because EIS is "hard-coded" for certain use cases, and the current
1412 // implementation doesn't support shared preview and video on the same
1413 // stream, return failure if EIS is forced on.
1414 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1415 LOGE("Combined video and preview usage flag is not supported due to EIS");
1416 return -EINVAL;
1417 }
1418 }
1419 return NO_ERROR;
1420}
1421
Thierry Strudel3d639192016-09-09 11:52:26 -07001422/*==============================================================================
1423 * FUNCTION : isSupportChannelNeeded
1424 *
1425 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1426 *
1427 * PARAMETERS :
1428 * @stream_list : streams to be configured
1429 * @stream_config_info : the config info for streams to be configured
1430 *
1431 * RETURN : Boolean true/false decision
1432 *
1433 *==========================================================================*/
1434bool QCamera3HardwareInterface::isSupportChannelNeeded(
1435 camera3_stream_configuration_t *streamList,
1436 cam_stream_size_info_t stream_config_info)
1437{
1438 uint32_t i;
1439 bool pprocRequested = false;
1440 /* Check for conditions where the PProc pipeline does not have any streams */
1441 for (i = 0; i < stream_config_info.num_streams; i++) {
1442 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1443 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1444 pprocRequested = true;
1445 break;
1446 }
1447 }
1448
1449 if (pprocRequested == false )
1450 return true;
1451
1452 /* Dummy stream needed if only raw or jpeg streams present */
1453 for (i = 0; i < streamList->num_streams; i++) {
1454 switch(streamList->streams[i]->format) {
1455 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1456 case HAL_PIXEL_FORMAT_RAW10:
1457 case HAL_PIXEL_FORMAT_RAW16:
1458 case HAL_PIXEL_FORMAT_BLOB:
1459 break;
1460 default:
1461 return false;
1462 }
1463 }
1464 return true;
1465}
1466
1467/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001468 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001469 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001470 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001471 *
1472 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001473 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001474 *
1475 * RETURN : int32_t type of status
1476 * NO_ERROR -- success
1477 * non-zero failure code
1478 *
1479 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001480int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001481{
1482 int32_t rc = NO_ERROR;
1483
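    // The query is a two-step handshake: first push the largest configured
    // stream dimension via CAM_INTF_PARM_MAX_DIMENSION so the backend selects
    // a matching sensor mode, then read back CAM_INTF_PARM_SENSOR_MODE_INFO.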
1484 cam_dimension_t max_dim = {0, 0};
1485 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1486 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1487 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1488 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1489 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1490 }
1491
1492 clear_metadata_buffer(mParameters);
1493
1494 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1495 max_dim);
1496 if (rc != NO_ERROR) {
1497 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1498 return rc;
1499 }
1500
1501 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1502 if (rc != NO_ERROR) {
1503 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1504 return rc;
1505 }
1506
1507 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001508 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001509
1510 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1511 mParameters);
1512 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001513 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001514 return rc;
1515 }
1516
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001517 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001518 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1519 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1520 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1521 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1522 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001523
1524 return rc;
1525}
1526
1527/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001528 * FUNCTION : addToPPFeatureMask
1529 *
1530 * DESCRIPTION: add additional features to pp feature mask based on
1531 * stream type and usecase
1532 *
1533 * PARAMETERS :
1534 * @stream_format : stream type for feature mask
1535 * @stream_idx : stream idx within postprocess_mask list to change
1536 *
1537 * RETURN : None
1538 *
1539 *==========================================================================*/
1540void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1541 uint32_t stream_idx)
1542{
1543 char feature_mask_value[PROPERTY_VALUE_MAX];
1544 cam_feature_mask_t feature_mask;
1545 int args_converted;
1546 int property_len;
1547
1548 /* Get feature mask from property */
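    // The property accepts either a hex value with a "0x" prefix or a plain
    // decimal value (see the sscanf calls below). Illustrative example only,
    // the actual bit assignments are backend specific:
    //   adb shell setprop persist.camera.hal3.feature 0x1000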
Thierry Strudel269c81a2016-10-12 12:13:59 -07001549#ifdef _LE_CAMERA_
1550 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1551 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1552 property_len = property_get("persist.camera.hal3.feature",
1553 feature_mask_value, swtnr_feature_mask_value);
1554#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001555 property_len = property_get("persist.camera.hal3.feature",
1556 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001557#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001558 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1559 (feature_mask_value[1] == 'x')) {
1560 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1561 } else {
1562 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1563 }
1564 if (1 != args_converted) {
1565 feature_mask = 0;
1566 LOGE("Wrong feature mask %s", feature_mask_value);
1567 return;
1568 }
1569
1570 switch (stream_format) {
1571 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1572 /* Add SW TNR or LLVD SeeMore to the pp feature mask only if video hint is enabled */
1573 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1574 mStreamConfigInfo.postprocess_mask[stream_idx]
1575 |= CAM_QTI_FEATURE_SW_TNR;
1576 LOGH("Added SW TNR to pp feature mask");
1577 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1578 mStreamConfigInfo.postprocess_mask[stream_idx]
1579 |= CAM_QCOM_FEATURE_LLVD;
1580 LOGH("Added LLVD SeeMore to pp feature mask");
1581 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001582 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1583 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1584 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1585 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001586 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1587 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1588 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1589 CAM_QTI_FEATURE_BINNING_CORRECTION;
1590 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001591 break;
1592 }
1593 default:
1594 break;
1595 }
1596 LOGD("PP feature mask %llx",
1597 mStreamConfigInfo.postprocess_mask[stream_idx]);
1598}
1599
1600/*==============================================================================
1601 * FUNCTION : updateFpsInPreviewBuffer
1602 *
1603 * DESCRIPTION: update FPS information in preview buffer.
1604 *
1605 * PARAMETERS :
1606 * @metadata : pointer to metadata buffer
1607 * @frame_number: frame_number to look for in pending buffer list
1608 *
1609 * RETURN : None
1610 *
1611 *==========================================================================*/
1612void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1613 uint32_t frame_number)
1614{
1615 // Mark all pending buffers for this particular request
1616 // with corresponding framerate information
1617 for (List<PendingBuffersInRequest>::iterator req =
1618 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1619 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1620 for(List<PendingBufferInfo>::iterator j =
1621 req->mPendingBufferList.begin();
1622 j != req->mPendingBufferList.end(); j++) {
1623 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1624 if ((req->frame_number == frame_number) &&
1625 (channel->getStreamTypeMask() &
1626 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1627 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1628 CAM_INTF_PARM_FPS_RANGE, metadata) {
1629 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *)(*(j->buffer));
1632 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1633 }
1634 }
1635 }
1636 }
1637}
1638
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001639/*==============================================================================
1640 * FUNCTION : updateTimeStampInPendingBuffers
1641 *
1642 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1643 * of a frame number
1644 *
1645 * PARAMETERS :
1646 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1647 * @timestamp : timestamp to be set
1648 *
1649 * RETURN : None
1650 *
1651 *==========================================================================*/
1652void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1653 uint32_t frameNumber, nsecs_t timestamp)
1654{
1655 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1656 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1657 if (req->frame_number != frameNumber)
1658 continue;
1659
1660 for (auto k = req->mPendingBufferList.begin();
1661 k != req->mPendingBufferList.end(); k++ ) {
1662 struct private_handle_t *priv_handle =
1663 (struct private_handle_t *) (*(k->buffer));
1664 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1665 }
1666 }
1667 return;
1668}
1669
Thierry Strudel3d639192016-09-09 11:52:26 -07001670/*===========================================================================
1671 * FUNCTION : configureStreams
1672 *
1673 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1674 * and output streams.
1675 *
1676 * PARAMETERS :
1677 * @stream_list : streams to be configured
1678 *
1679 * RETURN :
1680 *
1681 *==========================================================================*/
1682int QCamera3HardwareInterface::configureStreams(
1683 camera3_stream_configuration_t *streamList)
1684{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001685 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001686 int rc = 0;
1687
1688 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001689 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001690 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001691 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001692
1693 return rc;
1694}
1695
1696/*===========================================================================
1697 * FUNCTION : configureStreamsPerfLocked
1698 *
1699 * DESCRIPTION: configureStreams while perfLock is held.
1700 *
1701 * PARAMETERS :
1702 * @stream_list : streams to be configured
1703 *
1704 * RETURN : int32_t type of status
1705 * NO_ERROR -- success
1706 * non-zero failure code
1707 *==========================================================================*/
1708int QCamera3HardwareInterface::configureStreamsPerfLocked(
1709 camera3_stream_configuration_t *streamList)
1710{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001711 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001712 int rc = 0;
1713
1714 // Sanity check stream_list
1715 if (streamList == NULL) {
1716 LOGE("NULL stream configuration");
1717 return BAD_VALUE;
1718 }
1719 if (streamList->streams == NULL) {
1720 LOGE("NULL stream list");
1721 return BAD_VALUE;
1722 }
1723
1724 if (streamList->num_streams < 1) {
1725 LOGE("Bad number of streams requested: %d",
1726 streamList->num_streams);
1727 return BAD_VALUE;
1728 }
1729
1730 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1731 LOGE("Maximum number of streams %d exceeded: %d",
1732 MAX_NUM_STREAMS, streamList->num_streams);
1733 return BAD_VALUE;
1734 }
1735
Jason Leec4cf5032017-05-24 18:31:41 -07001736 mOpMode = streamList->operation_mode;
1737 LOGD("mOpMode: %d", mOpMode);
1738
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001739 rc = validateUsageFlags(streamList);
1740 if (rc != NO_ERROR) {
1741 return rc;
1742 }
1743
Thierry Strudel3d639192016-09-09 11:52:26 -07001744 /* first invalidate all the streams in mStreamInfo;
1745 * if they appear again, they will be validated */
1746 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1747 it != mStreamInfo.end(); it++) {
1748 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1749 if (channel) {
1750 channel->stop();
1751 }
1752 (*it)->status = INVALID;
1753 }
1754
1755 if (mRawDumpChannel) {
1756 mRawDumpChannel->stop();
1757 delete mRawDumpChannel;
1758 mRawDumpChannel = NULL;
1759 }
1760
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001761 if (mHdrPlusRawSrcChannel) {
1762 mHdrPlusRawSrcChannel->stop();
1763 delete mHdrPlusRawSrcChannel;
1764 mHdrPlusRawSrcChannel = NULL;
1765 }
1766
Thierry Strudel3d639192016-09-09 11:52:26 -07001767 if (mSupportChannel)
1768 mSupportChannel->stop();
1769
1770 if (mAnalysisChannel) {
1771 mAnalysisChannel->stop();
1772 }
1773 if (mMetadataChannel) {
1774 /* If mStreamInfo is not empty, there is a metadata stream */
1775 mMetadataChannel->stop();
1776 }
1777 if (mChannelHandle) {
1778 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1779 mChannelHandle);
1780 LOGD("stopping channel %d", mChannelHandle);
1781 }
1782
1783 pthread_mutex_lock(&mMutex);
1784
1785 // Check state
1786 switch (mState) {
1787 case INITIALIZED:
1788 case CONFIGURED:
1789 case STARTED:
1790 /* valid state */
1791 break;
1792 default:
1793 LOGE("Invalid state %d", mState);
1794 pthread_mutex_unlock(&mMutex);
1795 return -ENODEV;
1796 }
1797
1798 /* Check whether we have video stream */
1799 m_bIs4KVideo = false;
1800 m_bIsVideo = false;
1801 m_bEisSupportedSize = false;
1802 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001803 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001804 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001805 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001806 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001807 uint32_t videoWidth = 0U;
1808 uint32_t videoHeight = 0U;
1809 size_t rawStreamCnt = 0;
1810 size_t stallStreamCnt = 0;
1811 size_t processedStreamCnt = 0;
1812 // Number of streams on ISP encoder path
1813 size_t numStreamsOnEncoder = 0;
1814 size_t numYuv888OnEncoder = 0;
1815 bool bYuv888OverrideJpeg = false;
1816 cam_dimension_t largeYuv888Size = {0, 0};
1817 cam_dimension_t maxViewfinderSize = {0, 0};
1818 bool bJpegExceeds4K = false;
1819 bool bJpegOnEncoder = false;
1820 bool bUseCommonFeatureMask = false;
1821 cam_feature_mask_t commonFeatureMask = 0;
1822 bool bSmallJpegSize = false;
1823 uint32_t width_ratio;
1824 uint32_t height_ratio;
1825 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1826 camera3_stream_t *inputStream = NULL;
1827 bool isJpeg = false;
1828 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001829 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001830 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001831
1832 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1833
1834 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001835 uint8_t eis_prop_set;
1836 uint32_t maxEisWidth = 0;
1837 uint32_t maxEisHeight = 0;
1838
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001839 // Initialize all instant AEC related variables
1840 mInstantAEC = false;
1841 mResetInstantAEC = false;
1842 mInstantAECSettledFrameNumber = 0;
1843 mAecSkipDisplayFrameBound = 0;
1844 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001845 mCurrFeatureState = 0;
1846 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001847
Thierry Strudel3d639192016-09-09 11:52:26 -07001848 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1849
1850 size_t count = IS_TYPE_MAX;
1851 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1852 for (size_t i = 0; i < count; i++) {
1853 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001854 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1855 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001856 break;
1857 }
1858 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001859
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001860 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 maxEisWidth = MAX_EIS_WIDTH;
1862 maxEisHeight = MAX_EIS_HEIGHT;
1863 }
1864
1865 /* EIS setprop control */
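    // persist.camera.eis.enable defaults to "1"; any non-zero value requests EIS,
    // which is honored only when the sensor advertises EIS 2.0/3.0 support and
    // the session is not in constrained high-speed mode (see m_bEisEnable below).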
1866 char eis_prop[PROPERTY_VALUE_MAX];
1867 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001868 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001869 eis_prop_set = (uint8_t)atoi(eis_prop);
1870
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001871 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001872 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1873
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001874 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1875 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001876
Thierry Strudel3d639192016-09-09 11:52:26 -07001877 /* stream configurations */
1878 for (size_t i = 0; i < streamList->num_streams; i++) {
1879 camera3_stream_t *newStream = streamList->streams[i];
1880 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1881 "height = %d, rotation = %d, usage = 0x%x",
1882 i, newStream->stream_type, newStream->format,
1883 newStream->width, newStream->height, newStream->rotation,
1884 newStream->usage);
1885 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1886 newStream->stream_type == CAMERA3_STREAM_INPUT){
1887 isZsl = true;
1888 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001889 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1890 IS_USAGE_PREVIEW(newStream->usage)) {
1891 isPreview = true;
1892 }
1893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1895 inputStream = newStream;
1896 }
1897
Emilian Peev7650c122017-01-19 08:24:33 -08001898 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1899 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001900 isJpeg = true;
1901 jpegSize.width = newStream->width;
1902 jpegSize.height = newStream->height;
1903 if (newStream->width > VIDEO_4K_WIDTH ||
1904 newStream->height > VIDEO_4K_HEIGHT)
1905 bJpegExceeds4K = true;
1906 }
1907
1908 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1909 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1910 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001911 // In HAL3 we can have multiple different video streams.
1912 // The videoWidth/videoHeight variables below hold the dimensions
1913 // of the largest of them
1914 if (videoWidth < newStream->width ||
1915 videoHeight < newStream->height) {
1916 videoWidth = newStream->width;
1917 videoHeight = newStream->height;
1918 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001919 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1920 (VIDEO_4K_HEIGHT <= newStream->height)) {
1921 m_bIs4KVideo = true;
1922 }
1923 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1924 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001925
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 }
1927 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1928 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1929 switch (newStream->format) {
1930 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001931 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1932 depthPresent = true;
1933 break;
1934 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001935 stallStreamCnt++;
1936 if (isOnEncoder(maxViewfinderSize, newStream->width,
1937 newStream->height)) {
1938 numStreamsOnEncoder++;
1939 bJpegOnEncoder = true;
1940 }
1941 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1942 newStream->width);
1943 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1944 newStream->height);
1945 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1946 "FATAL: max_downscale_factor cannot be zero and so assert");
1947 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1948 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1949 LOGH("Setting small jpeg size flag to true");
1950 bSmallJpegSize = true;
1951 }
1952 break;
1953 case HAL_PIXEL_FORMAT_RAW10:
1954 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1955 case HAL_PIXEL_FORMAT_RAW16:
1956 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001957 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1958 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1959 pdStatCount++;
1960 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001961 break;
1962 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1963 processedStreamCnt++;
1964 if (isOnEncoder(maxViewfinderSize, newStream->width,
1965 newStream->height)) {
1966 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1967 !IS_USAGE_ZSL(newStream->usage)) {
1968 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1969 }
1970 numStreamsOnEncoder++;
1971 }
1972 break;
1973 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1974 processedStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 // If Yuv888 size is not greater than 4K, set feature mask
1978 // to SUPERSET so that it supports concurrent requests on
1979 // YUV and JPEG.
1980 if (newStream->width <= VIDEO_4K_WIDTH &&
1981 newStream->height <= VIDEO_4K_HEIGHT) {
1982 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1983 }
1984 numStreamsOnEncoder++;
1985 numYuv888OnEncoder++;
1986 largeYuv888Size.width = newStream->width;
1987 largeYuv888Size.height = newStream->height;
1988 }
1989 break;
1990 default:
1991 processedStreamCnt++;
1992 if (isOnEncoder(maxViewfinderSize, newStream->width,
1993 newStream->height)) {
1994 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1995 numStreamsOnEncoder++;
1996 }
1997 break;
1998 }
1999
2000 }
2001 }
2002
2003 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2004 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2005 !m_bIsVideo) {
2006 m_bEisEnable = false;
2007 }
2008
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002009 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2010 pthread_mutex_unlock(&mMutex);
2011 return -EINVAL;
2012 }
2013
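    // debug.camera.tnr.forceenable (default "0"): a non-zero value forces TNR on
    // below, regardless of the per-preview/per-video TNR settings.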
Thierry Strudel54dc9782017-02-15 12:12:10 -08002014 uint8_t forceEnableTnr = 0;
2015 char tnr_prop[PROPERTY_VALUE_MAX];
2016 memset(tnr_prop, 0, sizeof(tnr_prop));
2017 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2018 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2019
Thierry Strudel3d639192016-09-09 11:52:26 -07002020 /* Logic to enable/disable TNR based on specific config size/etc.*/
2021 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002022 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2023 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002024 else if (forceEnableTnr)
2025 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002026
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002027 char videoHdrProp[PROPERTY_VALUE_MAX];
2028 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2029 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2030 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2031
2032 if (hdr_mode_prop == 1 && m_bIsVideo &&
2033 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2034 m_bVideoHdrEnabled = true;
2035 else
2036 m_bVideoHdrEnabled = false;
2037
2038
Thierry Strudel3d639192016-09-09 11:52:26 -07002039 /* Check if num_streams is sane */
2040 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2041 rawStreamCnt > MAX_RAW_STREAMS ||
2042 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2043 LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2044 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2045 pthread_mutex_unlock(&mMutex);
2046 return -EINVAL;
2047 }
2048 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002049 if (isZsl && m_bIs4KVideo) {
2050 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002051 pthread_mutex_unlock(&mMutex);
2052 return -EINVAL;
2053 }
2054 /* Check if stream sizes are sane */
2055 if (numStreamsOnEncoder > 2) {
2056 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 } else if (1 < numStreamsOnEncoder){
2060 bUseCommonFeatureMask = true;
2061 LOGH("Multiple streams above max viewfinder size, common mask needed");
2062 }
2063
2064 /* Check if BLOB size is greater than 4k in 4k recording case */
2065 if (m_bIs4KVideo && bJpegExceeds4K) {
2066 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2067 pthread_mutex_unlock(&mMutex);
2068 return -EINVAL;
2069 }
2070
Emilian Peev7650c122017-01-19 08:24:33 -08002071 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2072 depthPresent) {
2073 LOGE("HAL doesn't support depth streams in HFR mode!");
2074 pthread_mutex_unlock(&mMutex);
2075 return -EINVAL;
2076 }
2077
Thierry Strudel3d639192016-09-09 11:52:26 -07002078 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2079 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2080 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2081 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2082 // configurations:
2083 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2084 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2085 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2086 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2087 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2088 __func__);
2089 pthread_mutex_unlock(&mMutex);
2090 return -EINVAL;
2091 }
2092
2093 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2094 // the YUV stream's size is greater than the JPEG size, set common
2095 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2096 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2097 jpegSize.width, jpegSize.height) &&
2098 largeYuv888Size.width > jpegSize.width &&
2099 largeYuv888Size.height > jpegSize.height) {
2100 bYuv888OverrideJpeg = true;
2101 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2102 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2103 }
2104
2105 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2106 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2107 commonFeatureMask);
2108 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2109 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2110
2111 rc = validateStreamDimensions(streamList);
2112 if (rc == NO_ERROR) {
2113 rc = validateStreamRotations(streamList);
2114 }
2115 if (rc != NO_ERROR) {
2116 LOGE("Invalid stream configuration requested!");
2117 pthread_mutex_unlock(&mMutex);
2118 return rc;
2119 }
2120
Emilian Peev0f3c3162017-03-15 12:57:46 +00002121 if (1 < pdStatCount) {
2122 LOGE("HAL doesn't support multiple PD streams");
2123 pthread_mutex_unlock(&mMutex);
2124 return -EINVAL;
2125 }
2126
2127 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2128 (1 == pdStatCount)) {
2129 LOGE("HAL doesn't support PD streams in HFR mode!");
2130 pthread_mutex_unlock(&mMutex);
2131 return -EINVAL;
2132 }
2133
Thierry Strudel3d639192016-09-09 11:52:26 -07002134 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2135 for (size_t i = 0; i < streamList->num_streams; i++) {
2136 camera3_stream_t *newStream = streamList->streams[i];
2137 LOGH("newStream type = %d, stream format = %d "
2138 "stream size : %d x %d, stream rotation = %d",
2139 newStream->stream_type, newStream->format,
2140 newStream->width, newStream->height, newStream->rotation);
2141 //if the stream is already in mStreamInfo, validate it
2142 bool stream_exists = false;
2143 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2144 it != mStreamInfo.end(); it++) {
2145 if ((*it)->stream == newStream) {
2146 QCamera3ProcessingChannel *channel =
2147 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2148 stream_exists = true;
2149 if (channel)
2150 delete channel;
2151 (*it)->status = VALID;
2152 (*it)->stream->priv = NULL;
2153 (*it)->channel = NULL;
2154 }
2155 }
2156 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2157 //new stream
2158 stream_info_t* stream_info;
2159 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2160 if (!stream_info) {
2161 LOGE("Could not allocate stream info");
2162 rc = -ENOMEM;
2163 pthread_mutex_unlock(&mMutex);
2164 return rc;
2165 }
2166 stream_info->stream = newStream;
2167 stream_info->status = VALID;
2168 stream_info->channel = NULL;
2169 mStreamInfo.push_back(stream_info);
2170 }
2171 /* Covers Opaque ZSL and API1 F/W ZSL */
2172 if (IS_USAGE_ZSL(newStream->usage)
2173 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2174 if (zslStream != NULL) {
2175 LOGE("Multiple input/reprocess streams requested!");
2176 pthread_mutex_unlock(&mMutex);
2177 return BAD_VALUE;
2178 }
2179 zslStream = newStream;
2180 }
2181 /* Covers YUV reprocess */
2182 if (inputStream != NULL) {
2183 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2184 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2185 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2186 && inputStream->width == newStream->width
2187 && inputStream->height == newStream->height) {
2188 if (zslStream != NULL) {
2189 /* This scenario indicates that multiple YUV streams with the same
2190 * size as the input stream have been requested. Since the zsl stream
2191 * handle is solely used to override the size of streams that
2192 * share h/w streams, we just make a guess here as to which
2193 * of the streams is the ZSL stream. This will be refactored
2194 * once we have generic logic for streams sharing encoder output.
2195 */
2196 LOGH("Warning, Multiple ip/reprocess streams requested!");
2197 }
2198 zslStream = newStream;
2199 }
2200 }
2201 }
2202
2203 /* If a zsl stream is set, we know that we have configured at least one input or
2204 bidirectional stream */
2205 if (NULL != zslStream) {
2206 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2207 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2208 mInputStreamInfo.format = zslStream->format;
2209 mInputStreamInfo.usage = zslStream->usage;
2210 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2211 mInputStreamInfo.dim.width,
2212 mInputStreamInfo.dim.height,
2213 mInputStreamInfo.format, mInputStreamInfo.usage);
2214 }
2215
2216 cleanAndSortStreamInfo();
2217 if (mMetadataChannel) {
2218 delete mMetadataChannel;
2219 mMetadataChannel = NULL;
2220 }
2221 if (mSupportChannel) {
2222 delete mSupportChannel;
2223 mSupportChannel = NULL;
2224 }
2225
2226 if (mAnalysisChannel) {
2227 delete mAnalysisChannel;
2228 mAnalysisChannel = NULL;
2229 }
2230
2231 if (mDummyBatchChannel) {
2232 delete mDummyBatchChannel;
2233 mDummyBatchChannel = NULL;
2234 }
2235
Emilian Peev7650c122017-01-19 08:24:33 -08002236 if (mDepthChannel) {
2237 mDepthChannel = NULL;
2238 }
2239
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002240 mShutterDispatcher.clear();
2241 mOutputBufferDispatcher.clear();
2242
Thierry Strudel2896d122017-02-23 19:18:03 -08002243 char is_type_value[PROPERTY_VALUE_MAX];
2244 property_get("persist.camera.is_type", is_type_value, "4");
2245 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2246
Binhao Line406f062017-05-03 14:39:44 -07002247 char property_value[PROPERTY_VALUE_MAX];
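    // persist.camera.gzoom.at is read as a bit mask: bit 0 enables the Google
    // zoom feature for the video stream, bit 1 for the preview stream(s); both
    // apply only to the back camera. persist.camera.gzoom.4k must additionally
    // be non-zero for it to remain enabled on 4K video (checked further below).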
2248 property_get("persist.camera.gzoom.at", property_value, "0");
2249 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002250 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2251 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2252 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2253 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002254
2255 property_get("persist.camera.gzoom.4k", property_value, "0");
2256 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2257
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 //Create metadata channel and initialize it
2259 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2260 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2261 gCamCapability[mCameraId]->color_arrangement);
2262 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2263 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002264 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002265 if (mMetadataChannel == NULL) {
2266 LOGE("failed to allocate metadata channel");
2267 rc = -ENOMEM;
2268 pthread_mutex_unlock(&mMutex);
2269 return rc;
2270 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002271 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2273 if (rc < 0) {
2274 LOGE("metadata channel initialization failed");
2275 delete mMetadataChannel;
2276 mMetadataChannel = NULL;
2277 pthread_mutex_unlock(&mMutex);
2278 return rc;
2279 }
2280
Thierry Strudel2896d122017-02-23 19:18:03 -08002281 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002282 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002283 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002284 // Keep track of preview/video stream indices.
2285 // There could be more than one preview stream, but only one video stream.
2286 int32_t video_stream_idx = -1;
2287 int32_t preview_stream_idx[streamList->num_streams];
2288 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002289 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2290 /* Allocate channel objects for the requested streams */
2291 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002292
Thierry Strudel3d639192016-09-09 11:52:26 -07002293 camera3_stream_t *newStream = streamList->streams[i];
2294 uint32_t stream_usage = newStream->usage;
2295 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2296 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2297 struct camera_info *p_info = NULL;
2298 pthread_mutex_lock(&gCamLock);
2299 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2300 pthread_mutex_unlock(&gCamLock);
2301 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2302 || IS_USAGE_ZSL(newStream->usage)) &&
2303 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002306 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2307 if (bUseCommonFeatureMask)
2308 zsl_ppmask = commonFeatureMask;
2309 else
2310 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002311 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002312 if (numStreamsOnEncoder > 0)
2313 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2314 else
2315 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002317 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002318 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002319 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 LOGH("Input stream configured, reprocess config");
2321 } else {
2322 //for non zsl streams find out the format
2323 switch (newStream->format) {
2324 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2325 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002326 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002327 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2328 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2329 /* add additional features to pp feature mask */
2330 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2331 mStreamConfigInfo.num_streams);
2332
2333 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2334 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2335 CAM_STREAM_TYPE_VIDEO;
2336 if (m_bTnrEnabled && m_bTnrVideo) {
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2338 CAM_QCOM_FEATURE_CPP_TNR;
2339 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2340 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2341 ~CAM_QCOM_FEATURE_CDS;
2342 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002343 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2344 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2345 CAM_QTI_FEATURE_PPEISCORE;
2346 }
Binhao Line406f062017-05-03 14:39:44 -07002347 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2348 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2349 CAM_QCOM_FEATURE_GOOG_ZOOM;
2350 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002351 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002352 } else {
2353 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2354 CAM_STREAM_TYPE_PREVIEW;
2355 if (m_bTnrEnabled && m_bTnrPreview) {
2356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2357 CAM_QCOM_FEATURE_CPP_TNR;
2358 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2360 ~CAM_QCOM_FEATURE_CDS;
2361 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002362 if(!m_bSwTnrPreview) {
2363 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2364 ~CAM_QTI_FEATURE_SW_TNR;
2365 }
Binhao Line406f062017-05-03 14:39:44 -07002366 if (is_goog_zoom_preview_enabled) {
2367 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2368 CAM_QCOM_FEATURE_GOOG_ZOOM;
2369 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002370 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002371 padding_info.width_padding = mSurfaceStridePadding;
2372 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002373 previewSize.width = (int32_t)newStream->width;
2374 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 }
2376 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2377 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2378 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2379 newStream->height;
2380 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2381 newStream->width;
2382 }
2383 }
2384 break;
2385 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002386 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002387 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2388 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2389 if (bUseCommonFeatureMask)
2390 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2391 commonFeatureMask;
2392 else
2393 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2394 CAM_QCOM_FEATURE_NONE;
2395 } else {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2397 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2398 }
2399 break;
2400 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002401 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2403 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2404 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2405 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2406 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002407 /* Remove rotation if it is not supported
2408 for 4K LiveVideo snapshot case (online processing) */
2409 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2410 CAM_QCOM_FEATURE_ROTATION)) {
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2412 &= ~CAM_QCOM_FEATURE_ROTATION;
2413 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002414 } else {
2415 if (bUseCommonFeatureMask &&
2416 isOnEncoder(maxViewfinderSize, newStream->width,
2417 newStream->height)) {
2418 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2419 } else {
2420 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2421 }
2422 }
2423 if (isZsl) {
2424 if (zslStream) {
2425 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2426 (int32_t)zslStream->width;
2427 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2428 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2430 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002431 } else {
2432 LOGE("Error, No ZSL stream identified");
2433 pthread_mutex_unlock(&mMutex);
2434 return -EINVAL;
2435 }
2436 } else if (m_bIs4KVideo) {
2437 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2438 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2439 } else if (bYuv888OverrideJpeg) {
2440 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2441 (int32_t)largeYuv888Size.width;
2442 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2443 (int32_t)largeYuv888Size.height;
2444 }
2445 break;
2446 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2447 case HAL_PIXEL_FORMAT_RAW16:
2448 case HAL_PIXEL_FORMAT_RAW10:
2449 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2451 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002452 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2453 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2454 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2455 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2456 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2457 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2458 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2459 gCamCapability[mCameraId]->dt[mPDIndex];
2460 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2461 gCamCapability[mCameraId]->vc[mPDIndex];
2462 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002463 break;
2464 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002465 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002466 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2467 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2468 break;
2469 }
2470 }
2471
2472 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2473 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2474 gCamCapability[mCameraId]->color_arrangement);
2475
2476 if (newStream->priv == NULL) {
2477 //New stream, construct channel
2478 switch (newStream->stream_type) {
2479 case CAMERA3_STREAM_INPUT:
2480 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2481 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2482 break;
2483 case CAMERA3_STREAM_BIDIRECTIONAL:
2484 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2485 GRALLOC_USAGE_HW_CAMERA_WRITE;
2486 break;
2487 case CAMERA3_STREAM_OUTPUT:
2488 /* For video encoding streams, set read/write rarely
2489 * flags so that the buffers may be allocated un-cached */
2490 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2491 newStream->usage |=
2492 (GRALLOC_USAGE_SW_READ_RARELY |
2493 GRALLOC_USAGE_SW_WRITE_RARELY |
2494 GRALLOC_USAGE_HW_CAMERA_WRITE);
2495 else if (IS_USAGE_ZSL(newStream->usage))
2496 {
2497 LOGD("ZSL usage flag skipping");
2498 }
2499 else if (newStream == zslStream
2500 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2501 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2502 } else
2503 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2504 break;
2505 default:
2506 LOGE("Invalid stream_type %d", newStream->stream_type);
2507 break;
2508 }
2509
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002510 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2512 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2513 QCamera3ProcessingChannel *channel = NULL;
2514 switch (newStream->format) {
2515 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2516 if ((newStream->usage &
2517 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2518 (streamList->operation_mode ==
2519 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2520 ) {
2521 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2522 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002523 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002524 this,
2525 newStream,
2526 (cam_stream_type_t)
2527 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2529 mMetadataChannel,
2530 0); //heap buffers are not required for HFR video channel
2531 if (channel == NULL) {
2532 LOGE("allocation of channel failed");
2533 pthread_mutex_unlock(&mMutex);
2534 return -ENOMEM;
2535 }
2536 //channel->getNumBuffers() will return 0 here so use
2537 //MAX_INFLIGHT_HFR_REQUESTS
2538 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2539 newStream->priv = channel;
2540 LOGI("num video buffers in HFR mode: %d",
2541 MAX_INFLIGHT_HFR_REQUESTS);
2542 } else {
2543 /* In the HFR preview-only case, copy the stream contents to create a
2544 * dummy batch channel so that sensor streaming stays in
2545 * HFR mode */
2546 if (!m_bIsVideo && (streamList->operation_mode ==
2547 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2548 mDummyBatchStream = *newStream;
2549 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002550 int bufferCount = MAX_INFLIGHT_REQUESTS;
2551 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2552 CAM_STREAM_TYPE_VIDEO) {
2553 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2554 bufferCount = MAX_VIDEO_BUFFERS;
2555 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002556 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2557 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002558 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002559 this,
2560 newStream,
2561 (cam_stream_type_t)
2562 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2563 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2564 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002565 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002566 if (channel == NULL) {
2567 LOGE("allocation of channel failed");
2568 pthread_mutex_unlock(&mMutex);
2569 return -ENOMEM;
2570 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002571 /* disable UBWC for preview, though supported,
2572 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002573 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002574 (previewSize.width == (int32_t)videoWidth)&&
2575 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002576 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002577 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002578 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002579 /* When goog_zoom is linked to the preview or video stream,
2580 * disable UBWC for the linked stream */
2581 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2582 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2583 channel->setUBWCEnabled(false);
2584 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002585 newStream->max_buffers = channel->getNumBuffers();
2586 newStream->priv = channel;
2587 }
2588 break;
2589 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2590 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2591 mChannelHandle,
2592 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002593 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002594 this,
2595 newStream,
2596 (cam_stream_type_t)
2597 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2598 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2599 mMetadataChannel);
2600 if (channel == NULL) {
2601 LOGE("allocation of YUV channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 newStream->max_buffers = channel->getNumBuffers();
2606 newStream->priv = channel;
2607 break;
2608 }
2609 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2610 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002611 case HAL_PIXEL_FORMAT_RAW10: {
2612 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2613 (HAL_DATASPACE_DEPTH != newStream->data_space))
2614 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002615 mRawChannel = new QCamera3RawChannel(
2616 mCameraHandle->camera_handle, mChannelHandle,
2617 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002618 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 this, newStream,
2620 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002621 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002622 if (mRawChannel == NULL) {
2623 LOGE("allocation of raw channel failed");
2624 pthread_mutex_unlock(&mMutex);
2625 return -ENOMEM;
2626 }
2627 newStream->max_buffers = mRawChannel->getNumBuffers();
2628 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2629 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002630 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002632 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2633 mDepthChannel = new QCamera3DepthChannel(
2634 mCameraHandle->camera_handle, mChannelHandle,
2635 mCameraHandle->ops, NULL, NULL, &padding_info,
2636 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2637 mMetadataChannel);
2638 if (NULL == mDepthChannel) {
2639 LOGE("Allocation of depth channel failed");
2640 pthread_mutex_unlock(&mMutex);
2641 return NO_MEMORY;
2642 }
2643 newStream->priv = mDepthChannel;
2644 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2645 } else {
2646 // Max live snapshot inflight buffer is 1. This is to mitigate
2647 // frame drop issues for video snapshot. The more buffers being
2648 // allocated, the more frame drops there are.
2649 mPictureChannel = new QCamera3PicChannel(
2650 mCameraHandle->camera_handle, mChannelHandle,
2651 mCameraHandle->ops, captureResultCb,
2652 setBufferErrorStatus, &padding_info, this, newStream,
2653 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2654 m_bIs4KVideo, isZsl, mMetadataChannel,
2655 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2656 if (mPictureChannel == NULL) {
2657 LOGE("allocation of channel failed");
2658 pthread_mutex_unlock(&mMutex);
2659 return -ENOMEM;
2660 }
2661 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2662 newStream->max_buffers = mPictureChannel->getNumBuffers();
2663 mPictureChannel->overrideYuvSize(
2664 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2665 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002666 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002667 break;
2668
2669 default:
2670 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002671 pthread_mutex_unlock(&mMutex);
2672 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002673 }
2674 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2675 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2676 } else {
2677 LOGE("Error, Unknown stream type");
2678 pthread_mutex_unlock(&mMutex);
2679 return -EINVAL;
2680 }
2681
2682 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002683 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002684 // Here we only care whether it's EIS3 or not
2685 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2686 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2687 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2688 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002689 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002690 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002691 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002692 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2693 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2694 }
2695 }
2696
2697 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2698 it != mStreamInfo.end(); it++) {
2699 if ((*it)->stream == newStream) {
2700 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2701 break;
2702 }
2703 }
2704 } else {
2705 // Channel already exists for this stream
2706 // Do nothing for now
2707 }
2708 padding_info = gCamCapability[mCameraId]->padding_info;
2709
Emilian Peev7650c122017-01-19 08:24:33 -08002710 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002711 * since there is no real stream associated with them
2712 */
Emilian Peev7650c122017-01-19 08:24:33 -08002713 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002714 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2715 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002716 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002717 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002718 }
2719
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002720 // Let buffer dispatcher know the configured streams.
2721 mOutputBufferDispatcher.configureStreams(streamList);
2722
Binhao Lincdb362a2017-04-20 13:31:54 -07002723 // By default, preview stream TNR is disabled.
2724 // Enable TNR to the preview stream if all conditions below are satisfied:
2725 // 1. resolution <= 1080p.
2726 // 2. preview resolution == video resolution.
2727 // 3. video stream TNR is enabled.
2728 // 4. EIS2.0
2729 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2730 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2731 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2732 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2733 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2734 video_stream->width == preview_stream->width &&
2735 video_stream->height == preview_stream->height) {
2736 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2737 CAM_QCOM_FEATURE_CPP_TNR;
2738 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2739 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2740 ~CAM_QCOM_FEATURE_CDS;
2741 }
2742 }
2743
Thierry Strudel2896d122017-02-23 19:18:03 -08002744 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2745 onlyRaw = false;
2746 }
2747
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002748 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002749 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002750 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002751 cam_analysis_info_t analysisInfo;
2752 int32_t ret = NO_ERROR;
2753 ret = mCommon.getAnalysisInfo(
2754 FALSE,
2755 analysisFeatureMask,
2756 &analysisInfo);
2757 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002758 cam_color_filter_arrangement_t analysis_color_arrangement =
2759 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2760 CAM_FILTER_ARRANGEMENT_Y :
2761 gCamCapability[mCameraId]->color_arrangement);
2762 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2763 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002764 cam_dimension_t analysisDim;
2765 analysisDim = mCommon.getMatchingDimension(previewSize,
2766 analysisInfo.analysis_recommended_res);
2767
2768 mAnalysisChannel = new QCamera3SupportChannel(
2769 mCameraHandle->camera_handle,
2770 mChannelHandle,
2771 mCameraHandle->ops,
2772 &analysisInfo.analysis_padding_info,
2773 analysisFeatureMask,
2774 CAM_STREAM_TYPE_ANALYSIS,
2775 &analysisDim,
2776 (analysisInfo.analysis_format
2777 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2778 : CAM_FORMAT_YUV_420_NV21),
2779 analysisInfo.hw_analysis_supported,
2780 gCamCapability[mCameraId]->color_arrangement,
2781 this,
2782 0); // force buffer count to 0
2783 } else {
2784 LOGW("getAnalysisInfo failed, ret = %d", ret);
2785 }
2786 if (!mAnalysisChannel) {
2787 LOGW("Analysis channel cannot be created");
2788 }
2789 }
2790
Thierry Strudel3d639192016-09-09 11:52:26 -07002791 //RAW DUMP channel
2792 if (mEnableRawDump && isRawStreamRequested == false){
2793 cam_dimension_t rawDumpSize;
2794 rawDumpSize = getMaxRawSize(mCameraId);
2795 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2796 setPAAFSupport(rawDumpFeatureMask,
2797 CAM_STREAM_TYPE_RAW,
2798 gCamCapability[mCameraId]->color_arrangement);
2799 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2800 mChannelHandle,
2801 mCameraHandle->ops,
2802 rawDumpSize,
2803 &padding_info,
2804 this, rawDumpFeatureMask);
2805 if (!mRawDumpChannel) {
2806 LOGE("Raw Dump channel cannot be created");
2807 pthread_mutex_unlock(&mMutex);
2808 return -ENOMEM;
2809 }
2810 }
2811
Thierry Strudel3d639192016-09-09 11:52:26 -07002812 if (mAnalysisChannel) {
2813 cam_analysis_info_t analysisInfo;
2814 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2815 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2816 CAM_STREAM_TYPE_ANALYSIS;
2817 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2818 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002819 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002820 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2821 &analysisInfo);
2822 if (rc != NO_ERROR) {
2823 LOGE("getAnalysisInfo failed, ret = %d", rc);
2824 pthread_mutex_unlock(&mMutex);
2825 return rc;
2826 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002827 cam_color_filter_arrangement_t analysis_color_arrangement =
2828 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2829 CAM_FILTER_ARRANGEMENT_Y :
2830 gCamCapability[mCameraId]->color_arrangement);
2831 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2832 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2833 analysis_color_arrangement);
2834
Thierry Strudel3d639192016-09-09 11:52:26 -07002835 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002836 mCommon.getMatchingDimension(previewSize,
2837 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002838 mStreamConfigInfo.num_streams++;
2839 }
2840
Thierry Strudel2896d122017-02-23 19:18:03 -08002841 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002842 cam_analysis_info_t supportInfo;
2843 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2844 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2845 setPAAFSupport(callbackFeatureMask,
2846 CAM_STREAM_TYPE_CALLBACK,
2847 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002848 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002849 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002850 if (ret != NO_ERROR) {
2851 /* Ignore the error for Mono camera
2852 * because the PAAF bit mask is only set
2853 * for CAM_STREAM_TYPE_ANALYSIS stream type
2854 */
2855 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2856 LOGW("getAnalysisInfo failed, ret = %d", ret);
2857 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002858 }
2859 mSupportChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &gCamCapability[mCameraId]->padding_info,
2864 callbackFeatureMask,
2865 CAM_STREAM_TYPE_CALLBACK,
2866 &QCamera3SupportChannel::kDim,
2867 CAM_FORMAT_YUV_420_NV21,
2868 supportInfo.hw_analysis_supported,
2869 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002870 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002871 if (!mSupportChannel) {
2872 LOGE("dummy channel cannot be created");
2873 pthread_mutex_unlock(&mMutex);
2874 return -ENOMEM;
2875 }
2876 }
2877
2878 if (mSupportChannel) {
2879 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2880 QCamera3SupportChannel::kDim;
2881 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2882 CAM_STREAM_TYPE_CALLBACK;
2883 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2884 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2885 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2886 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2887 gCamCapability[mCameraId]->color_arrangement);
2888 mStreamConfigInfo.num_streams++;
2889 }
2890
2891 if (mRawDumpChannel) {
2892 cam_dimension_t rawSize;
2893 rawSize = getMaxRawSize(mCameraId);
2894 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2895 rawSize;
2896 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2897 CAM_STREAM_TYPE_RAW;
2898 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2899 CAM_QCOM_FEATURE_NONE;
2900 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2901 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2902 gCamCapability[mCameraId]->color_arrangement);
2903 mStreamConfigInfo.num_streams++;
2904 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002905
2906 if (mHdrPlusRawSrcChannel) {
2907 cam_dimension_t rawSize;
2908 rawSize = getMaxRawSize(mCameraId);
2909 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2910 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2912 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2913 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2914 gCamCapability[mCameraId]->color_arrangement);
2915 mStreamConfigInfo.num_streams++;
2916 }
2917
Thierry Strudel3d639192016-09-09 11:52:26 -07002918 /* In HFR mode, if no video stream is added, create a dummy channel so that
2919 * the ISP can run in batch mode even for the preview-only case. This channel is
2920 * never 'start'ed (no stream-on), it is only 'initialized' */
2921 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2922 !m_bIsVideo) {
2923 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2924 setPAAFSupport(dummyFeatureMask,
2925 CAM_STREAM_TYPE_VIDEO,
2926 gCamCapability[mCameraId]->color_arrangement);
2927 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2928 mChannelHandle,
2929 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002930 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002931 this,
2932 &mDummyBatchStream,
2933 CAM_STREAM_TYPE_VIDEO,
2934 dummyFeatureMask,
2935 mMetadataChannel);
2936 if (NULL == mDummyBatchChannel) {
2937 LOGE("creation of mDummyBatchChannel failed."
2938 "Preview will use non-hfr sensor mode ");
2939 }
2940 }
2941 if (mDummyBatchChannel) {
2942 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2943 mDummyBatchStream.width;
2944 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2945 mDummyBatchStream.height;
2946 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2947 CAM_STREAM_TYPE_VIDEO;
2948 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2949 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2950 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2951 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2952 gCamCapability[mCameraId]->color_arrangement);
2953 mStreamConfigInfo.num_streams++;
2954 }
2955
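// Publish the in-flight buffer budget for this stream configuration: 4K video passes 0
// (presumably deferring the cap to the backend), EIS 3.0 needs the deeper video queue
// (MAX_VIDEO_BUFFERS), and everything else is limited to MAX_INFLIGHT_REQUESTS.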
2956 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2957 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002958 m_bIs4KVideo ? 0 :
2959 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002960
2961 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2962 for (pendingRequestIterator i = mPendingRequestsList.begin();
2963 i != mPendingRequestsList.end();) {
2964 i = erasePendingRequest(i);
2965 }
2966 mPendingFrameDropList.clear();
2967 // Initialize/Reset the pending buffers list
2968 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2969 req.mPendingBufferList.clear();
2970 }
2971 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2972
Thierry Strudel3d639192016-09-09 11:52:26 -07002973 mCurJpegMeta.clear();
2974 //Get min frame duration for this streams configuration
2975 deriveMinFrameDuration();
2976
Chien-Yu Chenee335912017-02-09 17:53:20 -08002977 mFirstPreviewIntentSeen = false;
2978
2979 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002980 {
2981 Mutex::Autolock l(gHdrPlusClientLock);
2982 disableHdrPlusModeLocked();
2983 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002984
Thierry Strudel3d639192016-09-09 11:52:26 -07002985 // Update state
2986 mState = CONFIGURED;
2987
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002988 mFirstMetadataCallback = true;
2989
Thierry Strudel3d639192016-09-09 11:52:26 -07002990 pthread_mutex_unlock(&mMutex);
2991
2992 return rc;
2993}
2994
2995/*===========================================================================
2996 * FUNCTION : validateCaptureRequest
2997 *
2998 * DESCRIPTION: validate a capture request from camera service
2999 *
3000 * PARAMETERS :
3001 * @request : request from framework to process
3002 *
3003 * RETURN :
3004 *
3005 *==========================================================================*/
3006int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003007 camera3_capture_request_t *request,
3008 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003009{
3010 ssize_t idx = 0;
3011 const camera3_stream_buffer_t *b;
3012 CameraMetadata meta;
3013
3014 /* Sanity check the request */
3015 if (request == NULL) {
3016 LOGE("NULL capture request");
3017 return BAD_VALUE;
3018 }
3019
3020 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3021 /*settings cannot be null for the first request*/
3022 return BAD_VALUE;
3023 }
3024
3025 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003026 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3027 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003028 LOGE("Request %d: No output buffers provided!",
3029 frameNumber);
3030 return BAD_VALUE;
3031 }
3032 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3033 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3034 request->num_output_buffers, MAX_NUM_STREAMS);
3035 return BAD_VALUE;
3036 }
3037 if (request->input_buffer != NULL) {
3038 b = request->input_buffer;
3039 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3040 LOGE("Request %d: Buffer %ld: Status not OK!",
3041 frameNumber, (long)idx);
3042 return BAD_VALUE;
3043 }
3044 if (b->release_fence != -1) {
3045 LOGE("Request %d: Buffer %ld: Has a release fence!",
3046 frameNumber, (long)idx);
3047 return BAD_VALUE;
3048 }
3049 if (b->buffer == NULL) {
3050 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3051 frameNumber, (long)idx);
3052 return BAD_VALUE;
3053 }
3054 }
3055
3056 // Validate all buffers
3057 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003058 if (b == NULL) {
3059 return BAD_VALUE;
3060 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003061 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003062 QCamera3ProcessingChannel *channel =
3063 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3064 if (channel == NULL) {
3065 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3066 frameNumber, (long)idx);
3067 return BAD_VALUE;
3068 }
3069 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3070 LOGE("Request %d: Buffer %ld: Status not OK!",
3071 frameNumber, (long)idx);
3072 return BAD_VALUE;
3073 }
3074 if (b->release_fence != -1) {
3075 LOGE("Request %d: Buffer %ld: Has a release fence!",
3076 frameNumber, (long)idx);
3077 return BAD_VALUE;
3078 }
3079 if (b->buffer == NULL) {
3080 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3081 frameNumber, (long)idx);
3082 return BAD_VALUE;
3083 }
3084 if (*(b->buffer) == NULL) {
3085 LOGE("Request %d: Buffer %ld: NULL private handle!",
3086 frameNumber, (long)idx);
3087 return BAD_VALUE;
3088 }
3089 idx++;
3090 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003091 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003092 return NO_ERROR;
3093}
3094
3095/*===========================================================================
3096 * FUNCTION : deriveMinFrameDuration
3097 *
3098 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3099 * on currently configured streams.
3100 *
3101 * PARAMETERS : NONE
3102 *
3103 * RETURN : NONE
3104 *
3105 *==========================================================================*/
3106void QCamera3HardwareInterface::deriveMinFrameDuration()
3107{
3108 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3109
3110 maxJpegDim = 0;
3111 maxProcessedDim = 0;
3112 maxRawDim = 0;
3113
3114 // Figure out maximum jpeg, processed, and raw dimensions
3115 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3116 it != mStreamInfo.end(); it++) {
3117
3118 // Input stream doesn't have valid stream_type
3119 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3120 continue;
3121
3122 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3123 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3124 if (dimension > maxJpegDim)
3125 maxJpegDim = dimension;
3126 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3127 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3128 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3129 if (dimension > maxRawDim)
3130 maxRawDim = dimension;
3131 } else {
3132 if (dimension > maxProcessedDim)
3133 maxProcessedDim = dimension;
3134 }
3135 }
3136
3137 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3138 MAX_SIZES_CNT);
3139
3140 //Assume all jpeg dimensions are in processed dimensions.
3141 if (maxJpegDim > maxProcessedDim)
3142 maxProcessedDim = maxJpegDim;
3143 //Find the smallest raw dimension that is greater than or equal to the processed (incl. jpeg) dimension
3144 if (maxProcessedDim > maxRawDim) {
3145 maxRawDim = INT32_MAX;
3146
3147 for (size_t i = 0; i < count; i++) {
3148 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3149 gCamCapability[mCameraId]->raw_dim[i].height;
3150 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3151 maxRawDim = dimension;
3152 }
3153 }
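// Illustrative example (hypothetical sizes): with a 12 MP processed/JPEG maximum and
// sensor RAW sizes of 13 MP and 16 MP, the 13 MP entry is the smallest RAW dimension
// covering the processed dimension, so its minimum duration is picked up below.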
3154
3155 //Find minimum durations for processed, jpeg, and raw
3156 for (size_t i = 0; i < count; i++) {
3157 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3158 gCamCapability[mCameraId]->raw_dim[i].height) {
3159 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3160 break;
3161 }
3162 }
3163 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3164 for (size_t i = 0; i < count; i++) {
3165 if (maxProcessedDim ==
3166 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3167 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3168 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3169 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3170 break;
3171 }
3172 }
3173}
3174
3175/*===========================================================================
3176 * FUNCTION : getMinFrameDuration
3177 *
3178 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3179 * frame durations and the current request configuration.
3180 *
3181 * PARAMETERS : @request: request sent by the framework
3182 *
3183 * RETURN : min frame duration for a particular request
3184 *
3185 *==========================================================================*/
3186int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3187{
3188 bool hasJpegStream = false;
3189 bool hasRawStream = false;
3190 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3191 const camera3_stream_t *stream = request->output_buffers[i].stream;
3192 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3193 hasJpegStream = true;
3194 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3195 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3196 stream->format == HAL_PIXEL_FORMAT_RAW16)
3197 hasRawStream = true;
3198 }
3199
3200 if (!hasJpegStream)
3201 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3202 else
3203 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3204}
3205
3206/*===========================================================================
3207 * FUNCTION : handleBuffersDuringFlushLock
3208 *
3209 * DESCRIPTION: Account for buffers returned from back-end during flush
3210 * This function is executed while mMutex is held by the caller.
3211 *
3212 * PARAMETERS :
3213 * @buffer: image buffer for the callback
3214 *
3215 * RETURN :
3216 *==========================================================================*/
3217void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3218{
3219 bool buffer_found = false;
3220 for (List<PendingBuffersInRequest>::iterator req =
3221 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3222 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3223 for (List<PendingBufferInfo>::iterator i =
3224 req->mPendingBufferList.begin();
3225 i != req->mPendingBufferList.end(); i++) {
3226 if (i->buffer == buffer->buffer) {
3227 mPendingBuffersMap.numPendingBufsAtFlush--;
3228 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3229 buffer->buffer, req->frame_number,
3230 mPendingBuffersMap.numPendingBufsAtFlush);
3231 buffer_found = true;
3232 break;
3233 }
3234 }
3235 if (buffer_found) {
3236 break;
3237 }
3238 }
3239 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3240 //signal the flush()
3241 LOGD("All buffers returned to HAL. Continue flush");
3242 pthread_cond_signal(&mBuffersCond);
3243 }
3244}
3245
Thierry Strudel3d639192016-09-09 11:52:26 -07003246/*===========================================================================
3247 * FUNCTION : handleBatchMetadata
3248 *
3249 * DESCRIPTION: Handles metadata buffer callback in batch mode
3250 *
3251 * PARAMETERS : @metadata_buf: metadata buffer
3252 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3253 * the meta buf in this method
3254 *
3255 * RETURN :
3256 *
3257 *==========================================================================*/
3258void QCamera3HardwareInterface::handleBatchMetadata(
3259 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3260{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003261 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003262
3263 if (NULL == metadata_buf) {
3264 LOGE("metadata_buf is NULL");
3265 return;
3266 }
3267 /* In batch mode, the metadata will contain the frame number and timestamp of
3268 * the last frame in the batch. Eg: a batch containing buffers from request
3269 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3270 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3271 * multiple process_capture_results */
3272 metadata_buffer_t *metadata =
3273 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3274 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3275 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3276 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3277 uint32_t frame_number = 0, urgent_frame_number = 0;
3278 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3279 bool invalid_metadata = false;
3280 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3281 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003282 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003283
3284 int32_t *p_frame_number_valid =
3285 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3286 uint32_t *p_frame_number =
3287 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3288 int64_t *p_capture_time =
3289 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3290 int32_t *p_urgent_frame_number_valid =
3291 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3292 uint32_t *p_urgent_frame_number =
3293 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3294
3295 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3296 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3297 (NULL == p_urgent_frame_number)) {
3298 LOGE("Invalid metadata");
3299 invalid_metadata = true;
3300 } else {
3301 frame_number_valid = *p_frame_number_valid;
3302 last_frame_number = *p_frame_number;
3303 last_frame_capture_time = *p_capture_time;
3304 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3305 last_urgent_frame_number = *p_urgent_frame_number;
3306 }
3307
3308 /* In batch mode, when no video buffers are requested, set_parms are sent
3309 * for every capture_request. The difference between consecutive urgent
3310 * frame numbers and frame numbers should be used to interpolate the
3311 * corresponding frame numbers and time stamps */
3312 pthread_mutex_lock(&mMutex);
3313 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003314 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3315 if(idx < 0) {
3316 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3317 last_urgent_frame_number);
3318 mState = ERROR;
3319 pthread_mutex_unlock(&mMutex);
3320 return;
3321 }
3322 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003323 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3324 first_urgent_frame_number;
3325
3326 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3327 urgent_frame_number_valid,
3328 first_urgent_frame_number, last_urgent_frame_number);
3329 }
3330
3331 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003332 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3333 if(idx < 0) {
3334 LOGE("Invalid frame number received: %d. Irrecoverable error",
3335 last_frame_number);
3336 mState = ERROR;
3337 pthread_mutex_unlock(&mMutex);
3338 return;
3339 }
3340 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003341 frameNumDiff = last_frame_number + 1 -
3342 first_frame_number;
3343 mPendingBatchMap.removeItem(last_frame_number);
3344
3345 LOGD("frm: valid: %d frm_num: %d - %d",
3346 frame_number_valid,
3347 first_frame_number, last_frame_number);
3348
3349 }
3350 pthread_mutex_unlock(&mMutex);
3351
3352 if (urgent_frame_number_valid || frame_number_valid) {
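// One iteration per frame in the batch; the larger of the two diffs covers both the
// urgent (partial result) and the final metadata paths.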
3353 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3354 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3355 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3356 urgentFrameNumDiff, last_urgent_frame_number);
3357 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3358 LOGE("frameNumDiff: %d frameNum: %d",
3359 frameNumDiff, last_frame_number);
3360 }
3361
3362 for (size_t i = 0; i < loopCount; i++) {
3363 /* handleMetadataWithLock is called even for invalid_metadata for
3364 * pipeline depth calculation */
3365 if (!invalid_metadata) {
3366 /* Infer frame number. Batch metadata contains frame number of the
3367 * last frame */
3368 if (urgent_frame_number_valid) {
3369 if (i < urgentFrameNumDiff) {
3370 urgent_frame_number =
3371 first_urgent_frame_number + i;
3372 LOGD("inferred urgent frame_number: %d",
3373 urgent_frame_number);
3374 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3375 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3376 } else {
3377 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3378 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3379 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3380 }
3381 }
3382
3383 /* Infer frame number. Batch metadata contains frame number of the
3384 * last frame */
3385 if (frame_number_valid) {
3386 if (i < frameNumDiff) {
3387 frame_number = first_frame_number + i;
3388 LOGD("inferred frame_number: %d", frame_number);
3389 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3390 CAM_INTF_META_FRAME_NUMBER, frame_number);
3391 } else {
3392 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3393 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3394 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3395 }
3396 }
3397
3398 if (last_frame_capture_time) {
3399 //Infer timestamp
3400 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003401 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003402 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003403 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003404 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3405 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3406 LOGD("batch capture_time: %lld, capture_time: %lld",
3407 last_frame_capture_time, capture_time);
3408 }
3409 }
3410 pthread_mutex_lock(&mMutex);
3411 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003412 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003413 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3414 (i == frameNumDiff-1), /* last metadata in the batch */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003415 &is_metabuf_queued /* whether metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 pthread_mutex_unlock(&mMutex);
3417 }
3418
3419 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003420 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003421 mMetadataChannel->bufDone(metadata_buf);
3422 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003423 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003424 }
3425}
3426
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003427void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3428 camera3_error_msg_code_t errorCode)
3429{
3430 camera3_notify_msg_t notify_msg;
3431 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3432 notify_msg.type = CAMERA3_MSG_ERROR;
3433 notify_msg.message.error.error_code = errorCode;
3434 notify_msg.message.error.error_stream = NULL;
3435 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003436 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003437
3438 return;
3439}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003440
3441/*===========================================================================
3442 * FUNCTION : sendPartialMetadataWithLock
3443 *
3444 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3445 *
3446 * PARAMETERS : @metadata: metadata buffer
3447 * @requestIter: The iterator for the pending capture request for
3448 * which the partial result is being sent
3449 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3450 * last urgent metadata in a batch. Always true for non-batch mode
3451 *
3452 * RETURN :
3453 *
3454 *==========================================================================*/
3455
3456void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3457 metadata_buffer_t *metadata,
3458 const pendingRequestIterator requestIter,
3459 bool lastUrgentMetadataInBatch)
3460{
3461 camera3_capture_result_t result;
3462 memset(&result, 0, sizeof(camera3_capture_result_t));
3463
3464 requestIter->partial_result_cnt++;
3465
3466 // Extract 3A metadata
3467 result.result = translateCbUrgentMetadataToResultMetadata(
3468 metadata, lastUrgentMetadataInBatch);
3469 // Populate metadata result
3470 result.frame_number = requestIter->frame_number;
3471 result.num_output_buffers = 0;
3472 result.output_buffers = NULL;
3473 result.partial_result = requestIter->partial_result_cnt;
3474
3475 {
3476 Mutex::Autolock l(gHdrPlusClientLock);
3477 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3478 // Notify HDR+ client about the partial metadata.
3479 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3480 result.partial_result == PARTIAL_RESULT_COUNT);
3481 }
3482 }
3483
3484 orchestrateResult(&result);
3485 LOGD("urgent frame_number = %u", result.frame_number);
3486 free_camera_metadata((camera_metadata_t *)result.result);
3487}
3488
Thierry Strudel3d639192016-09-09 11:52:26 -07003489/*===========================================================================
3490 * FUNCTION : handleMetadataWithLock
3491 *
3492 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3493 *
3494 * PARAMETERS : @metadata_buf: metadata buffer
3495 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3496 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003497 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3498 * last urgent metadata in a batch. Always true for non-batch mode
3499 * @lastMetadataInBatch: Boolean to indicate whether this is the
3500 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003501 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3502 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003503 *
3504 * RETURN :
3505 *
3506 *==========================================================================*/
3507void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003508 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003509 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3510 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003511{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003512 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003513 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3514 //during flush do not send metadata from this thread
3515 LOGD("not sending metadata during flush or when mState is error");
3516 if (free_and_bufdone_meta_buf) {
3517 mMetadataChannel->bufDone(metadata_buf);
3518 free(metadata_buf);
3519 }
3520 return;
3521 }
3522
3523 //not in flush
3524 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3525 int32_t frame_number_valid, urgent_frame_number_valid;
3526 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003527 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003528 nsecs_t currentSysTime;
3529
3530 int32_t *p_frame_number_valid =
3531 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3532 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3533 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003534 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003535 int32_t *p_urgent_frame_number_valid =
3536 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3537 uint32_t *p_urgent_frame_number =
3538 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3539 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3540 metadata) {
3541 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3542 *p_frame_number_valid, *p_frame_number);
3543 }
3544
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003545 camera_metadata_t *resultMetadata = nullptr;
3546
Thierry Strudel3d639192016-09-09 11:52:26 -07003547 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3548 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3549 LOGE("Invalid metadata");
3550 if (free_and_bufdone_meta_buf) {
3551 mMetadataChannel->bufDone(metadata_buf);
3552 free(metadata_buf);
3553 }
3554 goto done_metadata;
3555 }
3556 frame_number_valid = *p_frame_number_valid;
3557 frame_number = *p_frame_number;
3558 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003559 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003560 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3561 urgent_frame_number = *p_urgent_frame_number;
3562 currentSysTime = systemTime(CLOCK_MONOTONIC);
3563
Jason Lee603176d2017-05-31 11:43:27 -07003564 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3565 const int tries = 3;
3566 nsecs_t bestGap, measured;
3567 for (int i = 0; i < tries; ++i) {
3568 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3569 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3570 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3571 const nsecs_t gap = tmono2 - tmono;
3572 if (i == 0 || gap < bestGap) {
3573 bestGap = gap;
3574 measured = tbase - ((tmono + tmono2) >> 1);
3575 }
3576 }
3577 capture_time -= measured;
3578 }
3579
Thierry Strudel3d639192016-09-09 11:52:26 -07003580 // Detect if buffers from any requests are overdue
3581 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 int64_t timeout;
3583 {
3584 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3585 // If there is a pending HDR+ request, the following requests may be blocked until the
3586 // HDR+ request is done. So allow a longer timeout.
3587 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3588 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3589 }
3590
3591 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003592 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003593 assert(missed.stream->priv);
3594 if (missed.stream->priv) {
3595 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3596 assert(ch->mStreams[0]);
3597 if (ch->mStreams[0]) {
3598 LOGE("Cancel missing frame = %d, buffer = %p,"
3599 "stream type = %d, stream format = %d",
3600 req.frame_number, missed.buffer,
3601 ch->mStreams[0]->getMyType(), missed.stream->format);
3602 ch->timeoutFrame(req.frame_number);
3603 }
3604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003605 }
3606 }
3607 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003608 //For the very first metadata callback, regardless of whether it contains valid
3609 //frame number, send the partial metadata for the jumpstarting requests.
3610 //Note that this has to be done even if the metadata doesn't contain valid
3611 //urgent frame number, because in the case only 1 request is ever submitted
3612 //to HAL, there won't be subsequent valid urgent frame number.
3613 if (mFirstMetadataCallback) {
3614 for (pendingRequestIterator i =
3615 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3616 if (i->bUseFirstPartial) {
3617 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3618 }
3619 }
3620 mFirstMetadataCallback = false;
3621 }
3622
Thierry Strudel3d639192016-09-09 11:52:26 -07003623 //Partial result on process_capture_result for timestamp
3624 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003625 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003626
3627 //Received an urgent frame number, handle it
3628 //using partial results
3629 for (pendingRequestIterator i =
3630 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3631 LOGD("Iterator Frame = %d urgent frame = %d",
3632 i->frame_number, urgent_frame_number);
3633
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003634 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003635 (i->partial_result_cnt == 0)) {
3636 LOGE("Error: HAL missed urgent metadata for frame number %d",
3637 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003638 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003639 }
3640
3641 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003642 i->partial_result_cnt == 0) {
3643 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003644 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3645 // Instant AEC settled for this frame.
3646 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3647 mInstantAECSettledFrameNumber = urgent_frame_number;
3648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003649 break;
3650 }
3651 }
3652 }
3653
3654 if (!frame_number_valid) {
3655 LOGD("Not a valid normal frame number, used as SOF only");
3656 if (free_and_bufdone_meta_buf) {
3657 mMetadataChannel->bufDone(metadata_buf);
3658 free(metadata_buf);
3659 }
3660 goto done_metadata;
3661 }
3662 LOGH("valid frame_number = %u, capture_time = %lld",
3663 frame_number, capture_time);
3664
Emilian Peev7650c122017-01-19 08:24:33 -08003665 if (metadata->is_depth_data_valid) {
3666 handleDepthDataLocked(metadata->depth_data, frame_number);
3667 }
3668
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003669 // Check whether any stream buffer corresponding to this is dropped or not
3670 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3671 // Or, if instant AEC is enabled, drop frames until AEC is settled.
3672 for (auto & pendingRequest : mPendingRequestsList) {
3673 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3674 mInstantAECSettledFrameNumber)) {
3675 camera3_notify_msg_t notify_msg = {};
3676 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003677 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003678 QCamera3ProcessingChannel *channel =
3679 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003680 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003681 if (p_cam_frame_drop) {
3682 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003683 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003684 // Got the stream ID for drop frame.
3685 dropFrame = true;
3686 break;
3687 }
3688 }
3689 } else {
3690 // This is instant AEC case.
3691 // For instant AEC, drop the stream until AEC is settled.
3692 dropFrame = true;
3693 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003694
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003695 if (dropFrame) {
3696 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3697 if (p_cam_frame_drop) {
3698 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003699 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003701 } else {
3702 // For instant AEC, inform frame drop and frame number
3703 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3704 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003705 pendingRequest.frame_number, streamID,
3706 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003707 }
3708 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003709 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003710 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003711 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003712 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003713 if (p_cam_frame_drop) {
3714 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003715 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003716 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003717 } else {
3718 // For instant AEC, inform frame drop and frame number
3719 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3720 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003721 pendingRequest.frame_number, streamID,
3722 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003723 }
3724 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003725 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003726 PendingFrameDrop.stream_ID = streamID;
3727 // Add the Frame drop info to mPendingFrameDropList
3728 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003730 }
3731 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003732 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003733
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734 for (auto & pendingRequest : mPendingRequestsList) {
3735 // Find the pending request with the frame number.
3736 if (pendingRequest.frame_number == frame_number) {
3737 // Update the sensor timestamp.
3738 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003739
Thierry Strudel3d639192016-09-09 11:52:26 -07003740
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003741 /* Set the timestamp in display metadata so that clients aware of
3742 private_handle such as VT can use these unmodified timestamps.
3743 Camera framework is unaware of this timestamp and cannot change this */
Jason Lee603176d2017-05-31 11:43:27 -07003744 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003745
Thierry Strudel3d639192016-09-09 11:52:26 -07003746 // Find channel requiring metadata, meaning internal offline postprocess
3747 // is needed.
3748 //TODO: for now, we don't support two streams requiring metadata at the same time.
3749 // (because we are not making copies, and the metadata buffer is not reference counted.)
3750 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003751 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3752 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003753 if (iter->need_metadata) {
3754 internalPproc = true;
3755 QCamera3ProcessingChannel *channel =
3756 (QCamera3ProcessingChannel *)iter->stream->priv;
3757 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003758 if(p_is_metabuf_queued != NULL) {
3759 *p_is_metabuf_queued = true;
3760 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003761 break;
3762 }
3763 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003764 for (auto itr = pendingRequest.internalRequestList.begin();
3765 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003766 if (itr->need_metadata) {
3767 internalPproc = true;
3768 QCamera3ProcessingChannel *channel =
3769 (QCamera3ProcessingChannel *)itr->stream->priv;
3770 channel->queueReprocMetadata(metadata_buf);
3771 break;
3772 }
3773 }
3774
Thierry Strudel54dc9782017-02-15 12:12:10 -08003775 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003776
3777 bool *enableZsl = nullptr;
3778 if (gExposeEnableZslKey) {
3779 enableZsl = &pendingRequest.enableZsl;
3780 }
3781
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003782 resultMetadata = translateFromHalMetadata(metadata,
3783 pendingRequest.timestamp, pendingRequest.request_id,
3784 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3785 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003786 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003787 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003788 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003789 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003790 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003791 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003792
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003793 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003794
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003795 if (pendingRequest.blob_request) {
3796 //Dump tuning metadata if enabled and available
3797 char prop[PROPERTY_VALUE_MAX];
3798 memset(prop, 0, sizeof(prop));
3799 property_get("persist.camera.dumpmetadata", prop, "0");
3800 int32_t enabled = atoi(prop);
3801 if (enabled && metadata->is_tuning_params_valid) {
3802 dumpMetadataToFile(metadata->tuning_params,
3803 mMetaFrameCount,
3804 enabled,
3805 "Snapshot",
3806 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003807 }
3808 }
3809
3810 if (!internalPproc) {
3811 LOGD("couldn't find need_metadata for this metadata");
3812 // Return metadata buffer
3813 if (free_and_bufdone_meta_buf) {
3814 mMetadataChannel->bufDone(metadata_buf);
3815 free(metadata_buf);
3816 }
3817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003818
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003820 }
3821 }
3822
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003823 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3824
3825 // Try to send out capture result metadata.
3826 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 return;
3828
Thierry Strudel3d639192016-09-09 11:52:26 -07003829done_metadata:
3830 for (pendingRequestIterator i = mPendingRequestsList.begin();
3831 i != mPendingRequestsList.end() ;i++) {
3832 i->pipeline_depth++;
3833 }
3834 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3835 unblockRequestIfNecessary();
3836}
3837
3838/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003839 * FUNCTION : handleDepthDataLocked
3840 *
3841 * DESCRIPTION: Handles incoming depth data
3842 *
3843 * PARAMETERS : @depthData : Depth data
3844 * @frameNumber: Frame number of the incoming depth data
3845 *
3846 * RETURN :
3847 *
3848 *==========================================================================*/
3849void QCamera3HardwareInterface::handleDepthDataLocked(
3850 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3851 uint32_t currentFrameNumber;
3852 buffer_handle_t *depthBuffer;
3853
3854 if (nullptr == mDepthChannel) {
3855 LOGE("Depth channel not present!");
3856 return;
3857 }
3858
3859 camera3_stream_buffer_t resultBuffer =
3860 {.acquire_fence = -1,
3861 .release_fence = -1,
3862 .status = CAMERA3_BUFFER_STATUS_OK,
3863 .buffer = nullptr,
3864 .stream = mDepthChannel->getStream()};
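// Drain mapped depth buffers in order up to the current frame: the matching frame is
// populated with depth data, older frames (whose data never arrived) are returned as
// buffer errors, and newer frames are left queued.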
Emilian Peev7650c122017-01-19 08:24:33 -08003865 do {
3866 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3867 if (nullptr == depthBuffer) {
3868 break;
3869 }
3870
Emilian Peev7650c122017-01-19 08:24:33 -08003871 resultBuffer.buffer = depthBuffer;
3872 if (currentFrameNumber == frameNumber) {
3873 int32_t rc = mDepthChannel->populateDepthData(depthData,
3874 frameNumber);
3875 if (NO_ERROR != rc) {
3876 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3877 } else {
3878 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3879 }
3880 } else if (currentFrameNumber > frameNumber) {
3881 break;
3882 } else {
3883 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3884 {{currentFrameNumber, mDepthChannel->getStream(),
3885 CAMERA3_MSG_ERROR_BUFFER}}};
3886 orchestrateNotify(&notify_msg);
3887
3888 LOGE("Depth buffer for frame number: %d is missing "
3889 "returning back!", currentFrameNumber);
3890 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3891 }
3892 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003893 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003894 } while (currentFrameNumber < frameNumber);
3895}
3896
3897/*===========================================================================
3898 * FUNCTION : notifyErrorFoPendingDepthData
3899 *
3900 * DESCRIPTION: Returns error for any pending depth buffers
3901 *
3902 * PARAMETERS : depthCh - depth channel that needs to get flushed
3903 *
3904 * RETURN :
3905 *
3906 *==========================================================================*/
3907void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3908 QCamera3DepthChannel *depthCh) {
3909 uint32_t currentFrameNumber;
3910 buffer_handle_t *depthBuffer;
3911
3912 if (nullptr == depthCh) {
3913 return;
3914 }
3915
3916 camera3_notify_msg_t notify_msg =
3917 {.type = CAMERA3_MSG_ERROR,
3918 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3919 camera3_stream_buffer_t resultBuffer =
3920 {.acquire_fence = -1,
3921 .release_fence = -1,
3922 .buffer = nullptr,
3923 .stream = depthCh->getStream(),
3924 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08003925
3926 while (nullptr !=
3927 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3928 depthCh->unmapBuffer(currentFrameNumber);
3929
3930 notify_msg.message.error.frame_number = currentFrameNumber;
3931 orchestrateNotify(&notify_msg);
3932
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003933 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003934 };
3935}
3936
3937/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003938 * FUNCTION : hdrPlusPerfLock
3939 *
3940 * DESCRIPTION: perf lock for HDR+ using custom intent
3941 *
3942 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3943 *
3944 * RETURN : None
3945 *
3946 *==========================================================================*/
3947void QCamera3HardwareInterface::hdrPlusPerfLock(
3948 mm_camera_super_buf_t *metadata_buf)
3949{
3950 if (NULL == metadata_buf) {
3951 LOGE("metadata_buf is NULL");
3952 return;
3953 }
3954 metadata_buffer_t *metadata =
3955 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3956 int32_t *p_frame_number_valid =
3957 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3958 uint32_t *p_frame_number =
3959 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3960
3961 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3962 LOGE("%s: Invalid metadata", __func__);
3963 return;
3964 }
3965
3966 //acquire perf lock for 5 sec after the last HDR frame is captured
3967 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3968 if ((p_frame_number != NULL) &&
3969 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003970 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003971 }
3972 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003973}
3974
3975/*===========================================================================
3976 * FUNCTION : handleInputBufferWithLock
3977 *
3978 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3979 *
3980 * PARAMETERS : @frame_number: frame number of the input buffer
3981 *
3982 * RETURN :
3983 *
3984 *==========================================================================*/
3985void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3986{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003987 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003988 pendingRequestIterator i = mPendingRequestsList.begin();
3989 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3990 i++;
3991 }
3992 if (i != mPendingRequestsList.end() && i->input_buffer) {
3993 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003994 CameraMetadata settings;
3995 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
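        // For a reprocess request the shutter timestamp should match the original
        // capture, so prefer ANDROID_SENSOR_TIMESTAMP from the input settings and
        // only fall back to the current time if it is absent.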
3996 if(i->settings) {
3997 settings = i->settings;
3998 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3999 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004000 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004001 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004002 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004003 } else {
4004 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004005 }
4006
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004007 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4008 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4009 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004010
4011 camera3_capture_result result;
4012 memset(&result, 0, sizeof(camera3_capture_result));
4013 result.frame_number = frame_number;
4014 result.result = i->settings;
4015 result.input_buffer = i->input_buffer;
4016 result.partial_result = PARTIAL_RESULT_COUNT;
4017
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004018 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004019 LOGD("Input request metadata and input buffer frame_number = %u",
4020 i->frame_number);
4021 i = erasePendingRequest(i);
4022 } else {
4023 LOGE("Could not find input request for frame number %d", frame_number);
4024 }
4025}
4026
4027/*===========================================================================
4028 * FUNCTION : handleBufferWithLock
4029 *
4030 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4031 *
4032 * PARAMETERS : @buffer: image buffer for the callback
4033 * @frame_number: frame number of the image buffer
4034 *
4035 * RETURN :
4036 *
4037 *==========================================================================*/
4038void QCamera3HardwareInterface::handleBufferWithLock(
4039 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4040{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004041 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004042
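    // A returned blob (JPEG) buffer means the snapshot has completed, so the
    // snapshot perf lock can be released.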
4043 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4044 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4045 }
4046
Thierry Strudel3d639192016-09-09 11:52:26 -07004047 /* Nothing to be done during error state */
4048 if ((ERROR == mState) || (DEINIT == mState)) {
4049 return;
4050 }
4051 if (mFlushPerf) {
4052 handleBuffersDuringFlushLock(buffer);
4053 return;
4054 }
4055 //not in flush
4056 // If the frame number doesn't exist in the pending request list,
4057 // directly send the buffer to the frameworks, and update pending buffers map
4058 // Otherwise, book-keep the buffer.
4059 pendingRequestIterator i = mPendingRequestsList.begin();
4060 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4061 i++;
4062 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004063
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004064 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004066 // For a reprocessing request, try to send out result metadata.
4067 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004068 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004069 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004071 // Check if this frame was dropped.
4072 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4073 m != mPendingFrameDropList.end(); m++) {
4074 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4075 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4076 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4077 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4078 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4079 frame_number, streamID);
4080 m = mPendingFrameDropList.erase(m);
4081 break;
4082 }
4083 }
4084
4085 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4086 LOGH("result frame_number = %d, buffer = %p",
4087 frame_number, buffer->buffer);
4088
4089 mPendingBuffersMap.removeBuf(buffer->buffer);
4090 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4091
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004092 if (mPreviewStarted == false) {
4093 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4094 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004095 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4096
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004097 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4098 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4099 mPreviewStarted = true;
4100
4101 // Set power hint for preview
4102 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4103 }
4104 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004105}
4106
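/*===========================================================================
 * FUNCTION   : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Updates the result metadata of the pending request matching the
 *              given frame number and sends out, in frame number order, all
 *              capture results that have become ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber   : frame number of the result metadata
 *              @resultMetadata: result metadata for this frame
 *
 * RETURN     :
 *
 *==========================================================================*/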
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004108 const camera_metadata_t *resultMetadata)
4109{
4110 // Find the pending request for this result metadata.
4111 auto requestIter = mPendingRequestsList.begin();
4112 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4113 requestIter++;
4114 }
4115
4116 if (requestIter == mPendingRequestsList.end()) {
4117 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4118 return;
4119 }
4120
4121 // Update the result metadata
4122 requestIter->resultMetadata = resultMetadata;
4123
4124 // Check what type of request this is.
4125 bool liveRequest = false;
4126 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004127 // HDR+ request doesn't have partial results.
4128 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004129 } else if (requestIter->input_buffer != nullptr) {
4130 // Reprocessing request result is the same as settings.
4131 requestIter->resultMetadata = requestIter->settings;
4132 // Reprocessing request doesn't have partial results.
4133 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4134 } else {
4135 liveRequest = true;
4136 requestIter->partial_result_cnt++;
4137 mPendingLiveRequest--;
4138
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004139 {
4140 Mutex::Autolock l(gHdrPlusClientLock);
4141 // For a live request, send the metadata to HDR+ client.
4142 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4143 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4144 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4145 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004146 }
4147 }
4148
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004149 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4150 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004151 bool readyToSend = true;
4152
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004153 // Iterate through the pending requests to send out result metadata that are ready. Also if
4154 // this result metadata belongs to a live request, notify errors for previous live requests
4155 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004156 auto iter = mPendingRequestsList.begin();
4157 while (iter != mPendingRequestsList.end()) {
4158 // Check if current pending request is ready. If it's not ready, the following pending
4159 // requests are also not ready.
4160 if (readyToSend && iter->resultMetadata == nullptr) {
4161 readyToSend = false;
4162 }
4163
4164 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4165
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004166 camera3_capture_result_t result = {};
4167 result.frame_number = iter->frame_number;
4168 result.result = iter->resultMetadata;
4169 result.partial_result = iter->partial_result_cnt;
4170
4171 // If this pending buffer has result metadata, we may be able to send out shutter callback
4172 // and result metadata.
4173 if (iter->resultMetadata != nullptr) {
4174 if (!readyToSend) {
4175 // If any of the previous pending request is not ready, this pending request is
4176 // also not ready to send in order to keep shutter callbacks and result metadata
4177 // in order.
4178 iter++;
4179 continue;
4180 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004181 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4182 // If the result metadata belongs to a live request, notify errors for previous pending
4183 // live requests.
4184 mPendingLiveRequest--;
4185
4186 CameraMetadata dummyMetadata;
4187 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4188 result.result = dummyMetadata.release();
4189
4190 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004191
4192 // partial_result should be PARTIAL_RESULT_CNT in case of
4193 // ERROR_RESULT.
4194 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4195 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004196 } else {
4197 iter++;
4198 continue;
4199 }
4200
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004201 result.output_buffers = nullptr;
4202 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004203 orchestrateResult(&result);
4204
4205 // For reprocessing, result metadata is the same as settings so do not free it here to
4206 // avoid double free.
4207 if (result.result != iter->settings) {
4208 free_camera_metadata((camera_metadata_t *)result.result);
4209 }
4210 iter->resultMetadata = nullptr;
4211 iter = erasePendingRequest(iter);
4212 }
4213
4214 if (liveRequest) {
4215 for (auto &iter : mPendingRequestsList) {
4216 // Increment pipeline depth for the following pending requests.
4217 if (iter.frame_number > frameNumber) {
4218 iter.pipeline_depth++;
4219 }
4220 }
4221 }
4222
4223 unblockRequestIfNecessary();
4224}
4225
Thierry Strudel3d639192016-09-09 11:52:26 -07004226/*===========================================================================
4227 * FUNCTION : unblockRequestIfNecessary
4228 *
4229 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4230 * that mMutex is held when this function is called.
4231 *
4232 * PARAMETERS :
4233 *
4234 * RETURN :
4235 *
4236 *==========================================================================*/
4237void QCamera3HardwareInterface::unblockRequestIfNecessary()
4238{
4239 // Unblock process_capture_request
4240 pthread_cond_signal(&mRequestCond);
4241}
4242
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004243/*===========================================================================
4244 * FUNCTION : isHdrSnapshotRequest
4245 *
4246 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4247 *
4248 * PARAMETERS : camera3 request structure
4249 *
4250 * RETURN : boolean decision variable
4251 *
4252 *==========================================================================*/
4253bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4254{
4255 if (request == NULL) {
4256 LOGE("Invalid request handle");
4257 assert(0);
4258 return false;
4259 }
4260
4261 if (!mForceHdrSnapshot) {
4262 CameraMetadata frame_settings;
4263 frame_settings = request->settings;
4264
4265 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4266 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4267 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4268 return false;
4269 }
4270 } else {
4271 return false;
4272 }
4273
4274 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4275 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4276 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4277 return false;
4278 }
4279 } else {
4280 return false;
4281 }
4282 }
4283
4284 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4285 if (request->output_buffers[i].stream->format
4286 == HAL_PIXEL_FORMAT_BLOB) {
4287 return true;
4288 }
4289 }
4290
4291 return false;
4292}
4293/*===========================================================================
4294 * FUNCTION : orchestrateRequest
4295 *
4296 * DESCRIPTION: Orchestrates a capture request from camera service
4297 *
4298 * PARAMETERS :
4299 * @request : request from framework to process
4300 *
4301 * RETURN : Error status codes
4302 *
4303 *==========================================================================*/
4304int32_t QCamera3HardwareInterface::orchestrateRequest(
4305 camera3_capture_request_t *request)
4306{
4307
4308 uint32_t originalFrameNumber = request->frame_number;
4309 uint32_t originalOutputCount = request->num_output_buffers;
4310 const camera_metadata_t *original_settings = request->settings;
4311 List<InternalRequest> internallyRequestedStreams;
4312 List<InternalRequest> emptyInternalList;
4313
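    /* For an HDR snapshot request with no input buffer, the single framework
     * request is expanded below into a sequence of internal bracketed captures:
     * AE is locked, exposure compensation is stepped through the bracket values,
     * and metering-only frames are submitted before each bracketed capture so AE
     * can settle. Internal frame numbers come from _orchestrationDb so results
     * can later be mapped back to the framework frame number. Otherwise the
     * request is forwarded directly with a translated internal frame number. */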
4314 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4315 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4316 uint32_t internalFrameNumber;
4317 CameraMetadata modified_meta;
4318
4319
4320 /* Add Blob channel to list of internally requested streams */
4321 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4322 if (request->output_buffers[i].stream->format
4323 == HAL_PIXEL_FORMAT_BLOB) {
4324 InternalRequest streamRequested;
4325 streamRequested.meteringOnly = 1;
4326 streamRequested.need_metadata = 0;
4327 streamRequested.stream = request->output_buffers[i].stream;
4328 internallyRequestedStreams.push_back(streamRequested);
4329 }
4330 }
4331 request->num_output_buffers = 0;
4332 auto itr = internallyRequestedStreams.begin();
4333
4334 /* Modify setting to set compensation */
4335 modified_meta = request->settings;
4336 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4337 uint8_t aeLock = 1;
4338 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4339 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4340 camera_metadata_t *modified_settings = modified_meta.release();
4341 request->settings = modified_settings;
4342
4343 /* Capture Settling & -2x frame */
4344 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4345 request->frame_number = internalFrameNumber;
4346 processCaptureRequest(request, internallyRequestedStreams);
4347
4348 request->num_output_buffers = originalOutputCount;
4349 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4350 request->frame_number = internalFrameNumber;
4351 processCaptureRequest(request, emptyInternalList);
4352 request->num_output_buffers = 0;
4353
4354 modified_meta = modified_settings;
4355 expCompensation = 0;
4356 aeLock = 1;
4357 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4358 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4359 modified_settings = modified_meta.release();
4360 request->settings = modified_settings;
4361
4362 /* Capture Settling & 0X frame */
4363
4364 itr = internallyRequestedStreams.begin();
4365 if (itr == internallyRequestedStreams.end()) {
4366 LOGE("Error Internally Requested Stream list is empty");
4367 assert(0);
4368 } else {
4369 itr->need_metadata = 0;
4370 itr->meteringOnly = 1;
4371 }
4372
4373 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4374 request->frame_number = internalFrameNumber;
4375 processCaptureRequest(request, internallyRequestedStreams);
4376
4377 itr = internallyRequestedStreams.begin();
4378 if (itr == internallyRequestedStreams.end()) {
4379 ALOGE("Error Internally Requested Stream list is empty");
4380 assert(0);
4381 } else {
4382 itr->need_metadata = 1;
4383 itr->meteringOnly = 0;
4384 }
4385
4386 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4387 request->frame_number = internalFrameNumber;
4388 processCaptureRequest(request, internallyRequestedStreams);
4389
4390 /* Capture 2X frame*/
4391 modified_meta = modified_settings;
4392 expCompensation = GB_HDR_2X_STEP_EV;
4393 aeLock = 1;
4394 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4395 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4396 modified_settings = modified_meta.release();
4397 request->settings = modified_settings;
4398
4399 itr = internallyRequestedStreams.begin();
4400 if (itr == internallyRequestedStreams.end()) {
4401 ALOGE("Error Internally Requested Stream list is empty");
4402 assert(0);
4403 } else {
4404 itr->need_metadata = 0;
4405 itr->meteringOnly = 1;
4406 }
4407 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4408 request->frame_number = internalFrameNumber;
4409 processCaptureRequest(request, internallyRequestedStreams);
4410
4411 itr = internallyRequestedStreams.begin();
4412 if (itr == internallyRequestedStreams.end()) {
4413 ALOGE("Error Internally Requested Stream list is empty");
4414 assert(0);
4415 } else {
4416 itr->need_metadata = 1;
4417 itr->meteringOnly = 0;
4418 }
4419
4420 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4421 request->frame_number = internalFrameNumber;
4422 processCaptureRequest(request, internallyRequestedStreams);
4423
4424
4425 /* Capture 2X on original streaming config*/
4426 internallyRequestedStreams.clear();
4427
4428 /* Restore original settings pointer */
4429 request->settings = original_settings;
4430 } else {
4431 uint32_t internalFrameNumber;
4432 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4433 request->frame_number = internalFrameNumber;
4434 return processCaptureRequest(request, internallyRequestedStreams);
4435 }
4436
4437 return NO_ERROR;
4438}
4439
4440/*===========================================================================
4441 * FUNCTION : orchestrateResult
4442 *
4443 * DESCRIPTION: Orchestrates a capture result to camera service
4444 *
4445 * PARAMETERS :
4446 *   @result : capture result to send to the framework
4447 *
4448 * RETURN :
4449 *
4450 *==========================================================================*/
4451void QCamera3HardwareInterface::orchestrateResult(
4452 camera3_capture_result_t *result)
4453{
4454 uint32_t frameworkFrameNumber;
4455 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4456 frameworkFrameNumber);
4457 if (rc != NO_ERROR) {
4458 LOGE("Cannot find translated frameworkFrameNumber");
4459 assert(0);
4460 } else {
4461 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004462 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004463 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004464 if (result->result != NULL) {
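                // If the result carries ANDROID_SYNC_FRAME_NUMBER, rewrite it so
                // it refers to the framework frame number instead of the internal
                // one before handing the result to the framework.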
Binhao Lin299ffc92017-04-27 11:22:47 -07004465 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4466 camera_metadata_entry_t entry;
4467 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4468 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004469 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004470 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4471 if (ret != OK)
4472 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004473 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004474 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004475 result->frame_number = frameworkFrameNumber;
4476 mCallbackOps->process_capture_result(mCallbackOps, result);
4477 }
4478 }
4479}
4480
4481/*===========================================================================
4482 * FUNCTION : orchestrateNotify
4483 *
4484 * DESCRIPTION: Orchestrates a notify to camera service
4485 *
4486 * PARAMETERS :
4487 *   @notify_msg : notify message to send to the framework
4488 *
4489 * RETURN :
4490 *
4491 *==========================================================================*/
4492void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4493{
4494 uint32_t frameworkFrameNumber;
4495 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004496 int32_t rc = NO_ERROR;
4497
4498 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004499 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004500
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004501 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004502 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
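            // A device-level error is not tied to a specific capture, so report
            // it against frame number 0 even though no translation was found.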
4503 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4504 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004505 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004506 LOGE("Cannot find translated frameworkFrameNumber");
4507 assert(0);
4508 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004509 }
4510 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004511
4512 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4513 LOGD("Internal Request drop the notifyCb");
4514 } else {
4515 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4516 mCallbackOps->notify(mCallbackOps, notify_msg);
4517 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004518}
4519
4520/*===========================================================================
4521 * FUNCTION : FrameNumberRegistry
4522 *
4523 * DESCRIPTION: Constructor
4524 *
4525 * PARAMETERS :
4526 *
4527 * RETURN :
4528 *
4529 *==========================================================================*/
4530FrameNumberRegistry::FrameNumberRegistry()
4531{
4532 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4533}
4534
4535/*===========================================================================
4536 * FUNCTION : ~FrameNumberRegistry
4537 *
4538 * DESCRIPTION: Destructor
4539 *
4540 * PARAMETERS :
4541 *
4542 * RETURN :
4543 *
4544 *==========================================================================*/
4545FrameNumberRegistry::~FrameNumberRegistry()
4546{
4547}
4548
4549/*===========================================================================
4550 * FUNCTION : PurgeOldEntriesLocked
4551 *
4552 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4553 *
4554 * PARAMETERS :
4555 *
4556 * RETURN : NONE
4557 *
4558 *==========================================================================*/
4559void FrameNumberRegistry::purgeOldEntriesLocked()
4560{
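    // _register is keyed by internal frame number in increasing order, so erase
    // entries that fall more than FRAME_REGISTER_LRU_SIZE behind the next free
    // internal number and stop at the first entry that is still recent.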
4561 while (_register.begin() != _register.end()) {
4562 auto itr = _register.begin();
4563 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4564 _register.erase(itr);
4565 } else {
4566 return;
4567 }
4568 }
4569}
4570
4571/*===========================================================================
4572 * FUNCTION : allocStoreInternalFrameNumber
4573 *
4574 * DESCRIPTION: Method to record a framework request and associate a new
4575 *              internal frame number with it
4576 *
4577 * PARAMETERS :
4578 * @fFrameNumber: Identifier given by framework
4579 * @internalFN : Output parameter which will have the newly generated internal
4580 *                 frame number
4581 *
4582 * RETURN : Error code
4583 *
4584 *==========================================================================*/
4585int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4586 uint32_t &internalFrameNumber)
4587{
4588 Mutex::Autolock lock(mRegistryLock);
4589 internalFrameNumber = _nextFreeInternalNumber++;
4590 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4591 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4592 purgeOldEntriesLocked();
4593 return NO_ERROR;
4594}
4595
4596/*===========================================================================
4597 * FUNCTION : generateStoreInternalFrameNumber
4598 *
4599 * DESCRIPTION: Method to generate a new internal frame number that is not
4600 *              associated with any framework request
4601 *
4602 * PARAMETERS :
4603 *   @internalFrame#: Output parameter which will have the newly generated internal frame number
4604 *
4605 *
4606 * RETURN : Error code
4607 *
4608 *==========================================================================*/
4609int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4610{
4611 Mutex::Autolock lock(mRegistryLock);
4612 internalFrameNumber = _nextFreeInternalNumber++;
4613 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4614 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4615 purgeOldEntriesLocked();
4616 return NO_ERROR;
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : getFrameworkFrameNumber
4621 *
4622 * DESCRIPTION: Method to query the framework frame number given an internal one
4623 *
4624 * PARAMETERS :
4625 * @internalFrame#: Internal reference
4626 * @frameworkframenumber: Output parameter holding framework frame entry
4627 *
4628 * RETURN : Error code
4629 *
4630 *==========================================================================*/
4631int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4632 uint32_t &frameworkFrameNumber)
4633{
4634 Mutex::Autolock lock(mRegistryLock);
4635 auto itr = _register.find(internalFrameNumber);
4636 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004637 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004638 return -ENOENT;
4639 }
4640
4641 frameworkFrameNumber = itr->second;
4642 purgeOldEntriesLocked();
4643 return NO_ERROR;
4644}
Thierry Strudel3d639192016-09-09 11:52:26 -07004645
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004646status_t QCamera3HardwareInterface::fillPbStreamConfig(
4647 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4648 QCamera3Channel *channel, uint32_t streamIndex) {
4649 if (config == nullptr) {
4650 LOGE("%s: config is null", __FUNCTION__);
4651 return BAD_VALUE;
4652 }
4653
4654 if (channel == nullptr) {
4655 LOGE("%s: channel is null", __FUNCTION__);
4656 return BAD_VALUE;
4657 }
4658
4659 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4660 if (stream == nullptr) {
4661 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4662 return NAME_NOT_FOUND;
4663 }
4664
4665 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4666 if (streamInfo == nullptr) {
4667 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4668 return NAME_NOT_FOUND;
4669 }
4670
4671 config->id = pbStreamId;
4672 config->image.width = streamInfo->dim.width;
4673 config->image.height = streamInfo->dim.height;
4674 config->image.padding = 0;
4675 config->image.format = pbStreamFormat;
4676
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004677 uint32_t totalPlaneSize = 0;
4678
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004679 // Fill plane information.
4680 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4681 pbcamera::PlaneConfiguration plane;
4682 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4683 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4684 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004685
4686 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004687 }
4688
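    // Any bytes in the frame beyond the sum of the per-plane sizes
    // (stride * scanline) are reported as padding.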
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004689 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004690 return OK;
4691}
4692
Thierry Strudel3d639192016-09-09 11:52:26 -07004693/*===========================================================================
4694 * FUNCTION : processCaptureRequest
4695 *
4696 * DESCRIPTION: process a capture request from camera service
4697 *
4698 * PARAMETERS :
4699 * @request : request from framework to process
4700 *
4701 * RETURN :
4702 *
4703 *==========================================================================*/
4704int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004705 camera3_capture_request_t *request,
4706 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004707{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004708 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004709 int rc = NO_ERROR;
4710 int32_t request_id;
4711 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004712 bool isVidBufRequested = false;
4713 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004714 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004715
4716 pthread_mutex_lock(&mMutex);
4717
4718 // Validate current state
4719 switch (mState) {
4720 case CONFIGURED:
4721 case STARTED:
4722 /* valid state */
4723 break;
4724
4725 case ERROR:
4726 pthread_mutex_unlock(&mMutex);
4727 handleCameraDeviceError();
4728 return -ENODEV;
4729
4730 default:
4731 LOGE("Invalid state %d", mState);
4732 pthread_mutex_unlock(&mMutex);
4733 return -ENODEV;
4734 }
4735
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004736 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004737 if (rc != NO_ERROR) {
4738 LOGE("incoming request is not valid");
4739 pthread_mutex_unlock(&mMutex);
4740 return rc;
4741 }
4742
4743 meta = request->settings;
4744
4745 // For first capture request, send capture intent, and
4746 // stream on all streams
4747 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004748 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004749 // send an unconfigure to the backend so that the isp
4750 // resources are deallocated
4751 if (!mFirstConfiguration) {
4752 cam_stream_size_info_t stream_config_info;
4753 int32_t hal_version = CAM_HAL_V3;
4754 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4755 stream_config_info.buffer_info.min_buffers =
4756 MIN_INFLIGHT_REQUESTS;
4757 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004758 m_bIs4KVideo ? 0 :
4759 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004760 clear_metadata_buffer(mParameters);
4761 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4762 CAM_INTF_PARM_HAL_VERSION, hal_version);
4763 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4764 CAM_INTF_META_STREAM_INFO, stream_config_info);
4765 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4766 mParameters);
4767 if (rc < 0) {
4768 LOGE("set_parms for unconfigure failed");
4769 pthread_mutex_unlock(&mMutex);
4770 return rc;
4771 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004772
Thierry Strudel3d639192016-09-09 11:52:26 -07004773 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004774 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004775 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004776 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004777 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 property_get("persist.camera.is_type", is_type_value, "4");
4779 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4780 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4781 property_get("persist.camera.is_type_preview", is_type_value, "4");
4782 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4783 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004784
4785 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4786 int32_t hal_version = CAM_HAL_V3;
4787 uint8_t captureIntent =
4788 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4789 mCaptureIntent = captureIntent;
4790 clear_metadata_buffer(mParameters);
4791 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4792 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4793 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004794 if (mFirstConfiguration) {
4795 // configure instant AEC
4796 // Instant AEC is a session based parameter and it is needed only
4797 // once per complete session after open camera.
4798 // i.e. This is set only once for the first capture request, after open camera.
4799 setInstantAEC(meta);
4800 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004801 uint8_t fwkVideoStabMode=0;
4802 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4803 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4804 }
4805
Xue Tuecac74e2017-04-17 13:58:15 -07004806 // If EIS setprop is enabled then only turn it on for video/preview
4807 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004808 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004809 int32_t vsMode;
4810 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4812 rc = BAD_VALUE;
4813 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004814 LOGD("setEis %d", setEis);
4815 bool eis3Supported = false;
4816 size_t count = IS_TYPE_MAX;
4817 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4818 for (size_t i = 0; i < count; i++) {
4819 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4820 eis3Supported = true;
4821 break;
4822 }
4823 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004824
4825 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004826 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004827 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4828 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004829 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4830 is_type = isTypePreview;
4831 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4832 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4833 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004834 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004835 } else {
4836 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 } else {
4839 is_type = IS_TYPE_NONE;
4840 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004842 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004843 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4844 }
4845 }
4846
4847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4848 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4849
Thierry Strudel54dc9782017-02-15 12:12:10 -08004850 //Disable tintless only if the property is set to 0
4851 memset(prop, 0, sizeof(prop));
4852 property_get("persist.camera.tintless.enable", prop, "1");
4853 int32_t tintless_value = atoi(prop);
4854
Thierry Strudel3d639192016-09-09 11:52:26 -07004855 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4856 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004857
Thierry Strudel3d639192016-09-09 11:52:26 -07004858 //Disable CDS for HFR mode or if DIS/EIS is on.
4859 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4860 //after every configure_stream
4861 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4862 (m_bIsVideo)) {
4863 int32_t cds = CAM_CDS_MODE_OFF;
4864 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4865 CAM_INTF_PARM_CDS_MODE, cds))
4866 LOGE("Failed to disable CDS for HFR mode");
4867
4868 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004869
4870 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4871 uint8_t* use_av_timer = NULL;
4872
4873 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004874 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004875 use_av_timer = &m_debug_avtimer;
4876 }
4877 else{
4878 use_av_timer =
4879 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004880 if (use_av_timer) {
4881 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 }
4884
4885 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4886 rc = BAD_VALUE;
4887 }
4888 }
4889
Thierry Strudel3d639192016-09-09 11:52:26 -07004890 setMobicat();
4891
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004892 uint8_t nrMode = 0;
4893 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4894 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4895 }
4896
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 /* Set fps and hfr mode while sending meta stream info so that sensor
4898 * can configure appropriate streaming mode */
4899 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4901 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004902 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4903 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 if (rc == NO_ERROR) {
4905 int32_t max_fps =
4906 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004907 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004908 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4909 }
4910 /* For HFR, more buffers are dequeued upfront to improve the performance */
4911 if (mBatchSize) {
4912 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4913 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4914 }
4915 }
4916 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 LOGE("setHalFpsRange failed");
4918 }
4919 }
4920 if (meta.exists(ANDROID_CONTROL_MODE)) {
4921 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4922 rc = extractSceneMode(meta, metaMode, mParameters);
4923 if (rc != NO_ERROR) {
4924 LOGE("extractSceneMode failed");
4925 }
4926 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004927 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004928
Thierry Strudel04e026f2016-10-10 11:27:36 -07004929 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4930 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4931 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4932 rc = setVideoHdrMode(mParameters, vhdr);
4933 if (rc != NO_ERROR) {
4934                 LOGE("setVideoHdrMode failed");
4935 }
4936 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004938 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004939 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004940 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004941 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4943 sensorModeFullFov)) {
4944 rc = BAD_VALUE;
4945 }
4946 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 //TODO: validate the arguments, HSV scenemode should have only the
4948 //advertised fps ranges
4949
4950 /*set the capture intent, hal version, tintless, stream info,
4951          *and DIS enable parameters to the backend*/
4952 LOGD("set_parms META_STREAM_INFO " );
4953 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004954 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4955 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004956 mStreamConfigInfo.type[i],
4957 mStreamConfigInfo.stream_sizes[i].width,
4958 mStreamConfigInfo.stream_sizes[i].height,
4959 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004960 mStreamConfigInfo.format[i],
4961 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004963
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4965 mParameters);
4966 if (rc < 0) {
4967 LOGE("set_parms failed for hal version, stream info");
4968 }
4969
Chien-Yu Chenee335912017-02-09 17:53:20 -08004970 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4971 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004972 if (rc != NO_ERROR) {
4973 LOGE("Failed to get sensor output size");
4974 pthread_mutex_unlock(&mMutex);
4975 goto error_exit;
4976 }
4977
4978 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4979 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004980 mSensorModeInfo.active_array_size.width,
4981 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004982
4983 /* Set batchmode before initializing channel. Since registerBuffer
4984 * internally initializes some of the channels, better set batchmode
4985 * even before first register buffer */
4986 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4987 it != mStreamInfo.end(); it++) {
4988 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4989 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4990 && mBatchSize) {
4991 rc = channel->setBatchSize(mBatchSize);
4992 //Disable per frame map unmap for HFR/batchmode case
4993 rc |= channel->setPerFrameMapUnmap(false);
4994 if (NO_ERROR != rc) {
4995 LOGE("Channel init failed %d", rc);
4996 pthread_mutex_unlock(&mMutex);
4997 goto error_exit;
4998 }
4999 }
5000 }
5001
5002 //First initialize all streams
5003 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5004 it != mStreamInfo.end(); it++) {
5005 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005006
5007 /* Initial value of NR mode is needed before stream on */
5008 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5010 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005011 setEis) {
5012 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5013 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5014 is_type = mStreamConfigInfo.is_type[i];
5015 break;
5016 }
5017 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005019 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005020 rc = channel->initialize(IS_TYPE_NONE);
5021 }
5022 if (NO_ERROR != rc) {
5023 LOGE("Channel initialization failed %d", rc);
5024 pthread_mutex_unlock(&mMutex);
5025 goto error_exit;
5026 }
5027 }
5028
5029 if (mRawDumpChannel) {
5030 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5031 if (rc != NO_ERROR) {
5032 LOGE("Error: Raw Dump Channel init failed");
5033 pthread_mutex_unlock(&mMutex);
5034 goto error_exit;
5035 }
5036 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005037 if (mHdrPlusRawSrcChannel) {
5038 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5039 if (rc != NO_ERROR) {
5040 LOGE("Error: HDR+ RAW Source Channel init failed");
5041 pthread_mutex_unlock(&mMutex);
5042 goto error_exit;
5043 }
5044 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005045 if (mSupportChannel) {
5046 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5047 if (rc < 0) {
5048 LOGE("Support channel initialization failed");
5049 pthread_mutex_unlock(&mMutex);
5050 goto error_exit;
5051 }
5052 }
5053 if (mAnalysisChannel) {
5054 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5055 if (rc < 0) {
5056 LOGE("Analysis channel initialization failed");
5057 pthread_mutex_unlock(&mMutex);
5058 goto error_exit;
5059 }
5060 }
5061 if (mDummyBatchChannel) {
5062 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5063 if (rc < 0) {
5064 LOGE("mDummyBatchChannel setBatchSize failed");
5065 pthread_mutex_unlock(&mMutex);
5066 goto error_exit;
5067 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005068 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005069 if (rc < 0) {
5070 LOGE("mDummyBatchChannel initialization failed");
5071 pthread_mutex_unlock(&mMutex);
5072 goto error_exit;
5073 }
5074 }
5075
5076 // Set bundle info
5077 rc = setBundleInfo();
5078 if (rc < 0) {
5079 LOGE("setBundleInfo failed %d", rc);
5080 pthread_mutex_unlock(&mMutex);
5081 goto error_exit;
5082 }
5083
5084 //update settings from app here
5085 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5086 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5087 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5088 }
5089 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5090 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5091 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5092 }
5093 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5094 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5095 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5096
5097 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5098 (mLinkedCameraId != mCameraId) ) {
5099 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5100 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005101 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005102 goto error_exit;
5103 }
5104 }
5105
5106 // add bundle related cameras
5107 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5108 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005109 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5110 &m_pDualCamCmdPtr->bundle_info;
5111 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 if (mIsDeviceLinked)
5113 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5114 else
5115 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5116
5117 pthread_mutex_lock(&gCamLock);
5118
5119 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5120 LOGE("Dualcam: Invalid Session Id ");
5121 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005122 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005123 goto error_exit;
5124 }
5125
5126 if (mIsMainCamera == 1) {
5127 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5128 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005129 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005130 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 // related session id should be session id of linked session
5132 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5133 } else {
5134 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5135 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005136 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005137 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5139 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005140 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005141 pthread_mutex_unlock(&gCamLock);
5142
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005143 rc = mCameraHandle->ops->set_dual_cam_cmd(
5144 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 if (rc < 0) {
5146 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005147 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 goto error_exit;
5149 }
5150 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005151 goto no_error;
5152error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005153 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005154 return rc;
5155no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005156 mWokenUpByDaemon = false;
5157 mPendingLiveRequest = 0;
5158 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 }
5160
5161 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005162 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005163
5164 if (mFlushPerf) {
5165 //we cannot accept any requests during flush
5166 LOGE("process_capture_request cannot proceed during flush");
5167 pthread_mutex_unlock(&mMutex);
5168 return NO_ERROR; //should return an error
5169 }
5170
5171 if (meta.exists(ANDROID_REQUEST_ID)) {
5172 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5173 mCurrentRequestId = request_id;
5174 LOGD("Received request with id: %d", request_id);
5175 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5176 LOGE("Unable to find request id field, \
5177 & no previous id available");
5178 pthread_mutex_unlock(&mMutex);
5179 return NAME_NOT_FOUND;
5180 } else {
5181 LOGD("Re-using old request id");
5182 request_id = mCurrentRequestId;
5183 }
5184
5185 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5186 request->num_output_buffers,
5187 request->input_buffer,
5188 frameNumber);
5189 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005190 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005192 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005193 uint32_t snapshotStreamId = 0;
5194 for (size_t i = 0; i < request->num_output_buffers; i++) {
5195 const camera3_stream_buffer_t& output = request->output_buffers[i];
5196 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5197
Emilian Peev7650c122017-01-19 08:24:33 -08005198 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5199 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005200 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 blob_request = 1;
5202 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5203 }
5204
5205 if (output.acquire_fence != -1) {
5206 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5207 close(output.acquire_fence);
5208 if (rc != OK) {
5209 LOGE("sync wait failed %d", rc);
5210 pthread_mutex_unlock(&mMutex);
5211 return rc;
5212 }
5213 }
5214
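        // Depth (DEPTH data space blob) outputs are not requested from the
        // backend as a stream; they are expected to be satisfied from depth data
        // delivered with the capture metadata (see handleDepthDataLocked), so
        // just note that a depth buffer was requested and skip it here.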
Emilian Peev0f3c3162017-03-15 12:57:46 +00005215 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5216 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005217 depthRequestPresent = true;
5218 continue;
5219 }
5220
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005221 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005222 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005223
5224 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5225 isVidBufRequested = true;
5226 }
5227 }
5228
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005229 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5230 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5231 itr++) {
5232 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5233 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5234 channel->getStreamID(channel->getStreamTypeMask());
5235
5236 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5237 isVidBufRequested = true;
5238 }
5239 }
5240
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005242 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005243 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005244 }
5245 if (blob_request && mRawDumpChannel) {
5246 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005247 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005248 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005249 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005250 }
5251
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005252 {
5253 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5254 // Request a RAW buffer if
5255 // 1. mHdrPlusRawSrcChannel is valid.
5256 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5257 // 3. There is no pending HDR+ request.
5258 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5259 mHdrPlusPendingRequests.size() == 0) {
5260 streamsArray.stream_request[streamsArray.num_streams].streamID =
5261 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5262 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5263 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005264 }
5265
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005266 //extract capture intent
5267 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5268 mCaptureIntent =
5269 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5270 }
5271
5272 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5273 mCacMode =
5274 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5275 }
5276
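// Decide whether this capture will be serviced as an HDR+ (Easel) request instead of
// going through the regular per-stream request path below.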
5277 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005278 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005279
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005280 {
5281 Mutex::Autolock l(gHdrPlusClientLock);
5282 // If this request has a still capture intent, try to submit an HDR+ request.
5283 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5284 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5285 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5286 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005287 }
5288
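// For an HDR+ request only the frame parameters are set here; the capture itself is
// serviced by the HDR+ client, so the normal channel requests below are skipped.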
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005289 if (hdrPlusRequest) {
5290 // For a HDR+ request, just set the frame parameters.
5291 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5292 if (rc < 0) {
5293 LOGE("fail to set frame parameters");
5294 pthread_mutex_unlock(&mMutex);
5295 return rc;
5296 }
5297 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 /* Parse the settings:
5299 * - For every request in NORMAL MODE
5300 * - For every request in HFR mode during preview only case
5301 * - For first request of every batch in HFR mode during video
5302 * recording. In batchmode the same settings except frame number is
5303 * repeated in each request of the batch.
5304 */
5305 if (!mBatchSize ||
5306 (mBatchSize && !isVidBufRequested) ||
5307 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005308 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005309 if (rc < 0) {
5310 LOGE("fail to set frame parameters");
5311 pthread_mutex_unlock(&mMutex);
5312 return rc;
5313 }
5314 }
5315 /* For batchMode HFR, setFrameParameters is not called for every
5316 * request, but only the frame number of the latest request is parsed.
5317 * Keep track of first and last frame numbers in a batch so that
5318 * metadata for the frame numbers of batch can be duplicated in
5319 * handleBatchMetadata */
5320 if (mBatchSize) {
5321 if (!mToBeQueuedVidBufs) {
5322 //start of the batch
5323 mFirstFrameNumberInBatch = request->frame_number;
5324 }
5325 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5326 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5327 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005328 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005329 return BAD_VALUE;
5330 }
5331 }
5332 if (mNeedSensorRestart) {
5333 /* Unlock the mutex as restartSensor waits on the channels to be
5334 * stopped, which in turn calls stream callback functions -
5335 * handleBufferWithLock and handleMetadataWithLock */
5336 pthread_mutex_unlock(&mMutex);
5337 rc = dynamicUpdateMetaStreamInfo();
5338 if (rc != NO_ERROR) {
5339 LOGE("Restarting the sensor failed");
5340 return BAD_VALUE;
5341 }
5342 mNeedSensorRestart = false;
5343 pthread_mutex_lock(&mMutex);
5344 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005345 if(mResetInstantAEC) {
5346 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5347 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5348 mResetInstantAEC = false;
5349 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005350 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005351 if (request->input_buffer->acquire_fence != -1) {
5352 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5353 close(request->input_buffer->acquire_fence);
5354 if (rc != OK) {
5355 LOGE("input buffer sync wait failed %d", rc);
5356 pthread_mutex_unlock(&mMutex);
5357 return rc;
5358 }
5359 }
5360 }
5361
5362 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5363 mLastCustIntentFrmNum = frameNumber;
5364 }
5365 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005366 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 pendingRequestIterator latestRequest;
5368 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005369 pendingRequest.num_buffers = depthRequestPresent ?
5370 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005371 pendingRequest.request_id = request_id;
5372 pendingRequest.blob_request = blob_request;
5373 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005374 if (request->input_buffer) {
5375 pendingRequest.input_buffer =
5376 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5377 *(pendingRequest.input_buffer) = *(request->input_buffer);
5378 pInputBuffer = pendingRequest.input_buffer;
5379 } else {
5380 pendingRequest.input_buffer = NULL;
5381 pInputBuffer = NULL;
5382 }
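// Use the first (early) partial result only for the very first request after a stream
// configuration, and only when there is no input (reprocess) buffer.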
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005383 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005384
5385 pendingRequest.pipeline_depth = 0;
5386 pendingRequest.partial_result_cnt = 0;
5387 extractJpegMetadata(mCurJpegMeta, request);
5388 pendingRequest.jpegMetadata = mCurJpegMeta;
5389 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005390 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005391 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5392 mHybridAeEnable =
5393 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5394 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005395
5396 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5397 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005398 /* DevCamDebug metadata processCaptureRequest */
5399 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5400 mDevCamDebugMetaEnable =
5401 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5402 }
5403 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5404 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005405
5406 //extract CAC info
5407 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5408 mCacMode =
5409 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5410 }
5411 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005412 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005413
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005414 // extract enableZsl info
5415 if (gExposeEnableZslKey) {
5416 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5417 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5418 mZslEnabled = pendingRequest.enableZsl;
5419 } else {
5420 pendingRequest.enableZsl = mZslEnabled;
5421 }
5422 }
5423
Thierry Strudel3d639192016-09-09 11:52:26 -07005424 PendingBuffersInRequest bufsForCurRequest;
5425 bufsForCurRequest.frame_number = frameNumber;
5426 // Mark current timestamp for the new request
5427 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005428 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005429
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005430 if (hdrPlusRequest) {
5431 // Save settings for this request.
5432 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5433 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5434
5435 // Add to pending HDR+ request queue.
5436 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5437 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5438
5439 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5440 }
5441
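// Record every non-depth output buffer of this request so it can be tracked in
// mPendingBuffersMap and returned to the framework later.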
Thierry Strudel3d639192016-09-09 11:52:26 -07005442 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005443 if ((request->output_buffers[i].stream->data_space ==
5444 HAL_DATASPACE_DEPTH) &&
5445 (HAL_PIXEL_FORMAT_BLOB ==
5446 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005447 continue;
5448 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005449 RequestedBufferInfo requestedBuf;
5450 memset(&requestedBuf, 0, sizeof(requestedBuf));
5451 requestedBuf.stream = request->output_buffers[i].stream;
5452 requestedBuf.buffer = NULL;
5453 pendingRequest.buffers.push_back(requestedBuf);
5454
5455 // Add to buffer handle the pending buffers list
5456 PendingBufferInfo bufferInfo;
5457 bufferInfo.buffer = request->output_buffers[i].buffer;
5458 bufferInfo.stream = request->output_buffers[i].stream;
5459 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5460 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5461 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5462 frameNumber, bufferInfo.buffer,
5463 channel->getStreamTypeMask(), bufferInfo.stream->format);
5464 }
5465 // Add this request packet into mPendingBuffersMap
5466 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5467 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5468 mPendingBuffersMap.get_num_overall_buffers());
5469
5470 latestRequest = mPendingRequestsList.insert(
5471 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005472
5473 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5474 // for the frame number.
5475 mShutterDispatcher.expectShutter(frameNumber);
5476 for (size_t i = 0; i < request->num_output_buffers; i++) {
5477 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5478 }
5479
Thierry Strudel3d639192016-09-09 11:52:26 -07005480 if(mFlush) {
5481 LOGI("mFlush is true");
5482 pthread_mutex_unlock(&mMutex);
5483 return NO_ERROR;
5484 }
5485
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005486 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5487 // channel.
5488 if (!hdrPlusRequest) {
5489 int indexUsed;
5490 // Notify metadata channel we receive a request
5491 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005492
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005493 if(request->input_buffer != NULL){
5494 LOGD("Input request, frame_number %d", frameNumber);
5495 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5496 if (NO_ERROR != rc) {
5497 LOGE("fail to set reproc parameters");
5498 pthread_mutex_unlock(&mMutex);
5499 return rc;
5500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005501 }
5502
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005503 // Call request on other streams
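// streams_need_metadata counts output streams whose processing needs the HAL
// metadata buffer (e.g. offline JPEG/reprocess); at most one such stream per
// request is supported (checked after the loop).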
5504 uint32_t streams_need_metadata = 0;
5505 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5506 for (size_t i = 0; i < request->num_output_buffers; i++) {
5507 const camera3_stream_buffer_t& output = request->output_buffers[i];
5508 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5509
5510 if (channel == NULL) {
5511 LOGW("invalid channel pointer for stream");
5512 continue;
5513 }
5514
5515 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5516 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5517 output.buffer, request->input_buffer, frameNumber);
5518 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005519 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005520 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5521 if (rc < 0) {
5522 LOGE("Fail to request on picture channel");
5523 pthread_mutex_unlock(&mMutex);
5524 return rc;
5525 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005527 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5528 assert(NULL != mDepthChannel);
5529 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005530
Emilian Peev7650c122017-01-19 08:24:33 -08005531 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5532 if (rc < 0) {
5533 LOGE("Fail to map on depth buffer");
5534 pthread_mutex_unlock(&mMutex);
5535 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005536 }
Emilian Peev7650c122017-01-19 08:24:33 -08005537 } else {
5538 LOGD("snapshot request with buffer %p, frame_number %d",
5539 output.buffer, frameNumber);
5540 if (!request->settings) {
5541 rc = channel->request(output.buffer, frameNumber,
5542 NULL, mPrevParameters, indexUsed);
5543 } else {
5544 rc = channel->request(output.buffer, frameNumber,
5545 NULL, mParameters, indexUsed);
5546 }
5547 if (rc < 0) {
5548 LOGE("Fail to request on picture channel");
5549 pthread_mutex_unlock(&mMutex);
5550 return rc;
5551 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005552
Emilian Peev7650c122017-01-19 08:24:33 -08005553 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5554 uint32_t j = 0;
5555 for (j = 0; j < streamsArray.num_streams; j++) {
5556 if (streamsArray.stream_request[j].streamID == streamId) {
5557 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5558 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5559 else
5560 streamsArray.stream_request[j].buf_index = indexUsed;
5561 break;
5562 }
5563 }
5564 if (j == streamsArray.num_streams) {
5565 LOGE("Did not find matching stream to update index");
5566 assert(0);
5567 }
5568
5569 pendingBufferIter->need_metadata = true;
5570 streams_need_metadata++;
5571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005572 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5574 bool needMetadata = false;
5575 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5576 rc = yuvChannel->request(output.buffer, frameNumber,
5577 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5578 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005579 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005580 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 pthread_mutex_unlock(&mMutex);
5582 return rc;
5583 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005584
5585 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5586 uint32_t j = 0;
5587 for (j = 0; j < streamsArray.num_streams; j++) {
5588 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5590 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5591 else
5592 streamsArray.stream_request[j].buf_index = indexUsed;
5593 break;
5594 }
5595 }
5596 if (j == streamsArray.num_streams) {
5597 LOGE("Did not find matching stream to update index");
5598 assert(0);
5599 }
5600
5601 pendingBufferIter->need_metadata = needMetadata;
5602 if (needMetadata)
5603 streams_need_metadata += 1;
5604 LOGD("calling YUV channel request, need_metadata is %d",
5605 needMetadata);
5606 } else {
5607 LOGD("request with buffer %p, frame_number %d",
5608 output.buffer, frameNumber);
5609
5610 rc = channel->request(output.buffer, frameNumber, indexUsed);
5611
5612 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5613 uint32_t j = 0;
5614 for (j = 0; j < streamsArray.num_streams; j++) {
5615 if (streamsArray.stream_request[j].streamID == streamId) {
5616 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5617 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5618 else
5619 streamsArray.stream_request[j].buf_index = indexUsed;
5620 break;
5621 }
5622 }
5623 if (j == streamsArray.num_streams) {
5624 LOGE("Did not find matching stream to update index");
5625 assert(0);
5626 }
5627
5628 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5629 && mBatchSize) {
5630 mToBeQueuedVidBufs++;
5631 if (mToBeQueuedVidBufs == mBatchSize) {
5632 channel->queueBatchBuf();
5633 }
5634 }
5635 if (rc < 0) {
5636 LOGE("request failed");
5637 pthread_mutex_unlock(&mMutex);
5638 return rc;
5639 }
5640 }
5641 pendingBufferIter++;
5642 }
5643
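// Issue requests for streams the HAL asked for internally (e.g. metering-only
// captures); these have no framework buffer, so channel->request() is called with
// a NULL buffer.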
5644 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5645 itr++) {
5646 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5647
5648 if (channel == NULL) {
5649 LOGE("invalid channel pointer for stream");
5650 assert(0);
5651 return BAD_VALUE;
5652 }
5653
5654 InternalRequest requestedStream;
5655 requestedStream = (*itr);
5656
5657
5658 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5659 LOGD("snapshot request internally input buffer %p, frame_number %d",
5660 request->input_buffer, frameNumber);
5661 if(request->input_buffer != NULL){
5662 rc = channel->request(NULL, frameNumber,
5663 pInputBuffer, &mReprocMeta, indexUsed, true,
5664 requestedStream.meteringOnly);
5665 if (rc < 0) {
5666 LOGE("Fail to request on picture channel");
5667 pthread_mutex_unlock(&mMutex);
5668 return rc;
5669 }
5670 } else {
5671 LOGD("snapshot request with frame_number %d", frameNumber);
5672 if (!request->settings) {
5673 rc = channel->request(NULL, frameNumber,
5674 NULL, mPrevParameters, indexUsed, true,
5675 requestedStream.meteringOnly);
5676 } else {
5677 rc = channel->request(NULL, frameNumber,
5678 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5679 }
5680 if (rc < 0) {
5681 LOGE("Fail to request on picture channel");
5682 pthread_mutex_unlock(&mMutex);
5683 return rc;
5684 }
5685
5686 if ((*itr).meteringOnly != 1) {
5687 requestedStream.need_metadata = 1;
5688 streams_need_metadata++;
5689 }
5690 }
5691
5692 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5693 uint32_t j = 0;
5694 for (j = 0; j < streamsArray.num_streams; j++) {
5695 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005696 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5697 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5698 else
5699 streamsArray.stream_request[j].buf_index = indexUsed;
5700 break;
5701 }
5702 }
5703 if (j == streamsArray.num_streams) {
5704 LOGE("Did not find matching stream to update index");
5705 assert(0);
5706 }
5707
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005708 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005709 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005710 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005711 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005712 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005714 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005715
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005716 //If 2 streams have need_metadata set to true, fail the request, unless
5717 //we copy/reference count the metadata buffer
5718 if (streams_need_metadata > 1) {
5719 LOGE("not supporting request in which two streams requires"
5720 " 2 HAL metadata for reprocessing");
5721 pthread_mutex_unlock(&mMutex);
5722 return -EINVAL;
5723 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005724
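// Enable PDAF data in the backend only when this request asked for a depth (PDAF)
// blob output.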
Emilian Peev7650c122017-01-19 08:24:33 -08005725 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5727 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5728 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5729 pthread_mutex_unlock(&mMutex);
5730 return BAD_VALUE;
5731 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005732 if (request->input_buffer == NULL) {
5733 /* Set the parameters to backend:
5734 * - For every request in NORMAL MODE
5735 * - For every request in HFR mode during preview only case
5736 * - Once every batch in HFR mode during video recording
5737 */
5738 if (!mBatchSize ||
5739 (mBatchSize && !isVidBufRequested) ||
5740 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5741 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5742 mBatchSize, isVidBufRequested,
5743 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005744
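// HFR batch mode: merge this request's streams into mBatchedStreamsArray (skipping
// duplicates) so the whole batch is applied to the backend in one set_parms call.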
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005745 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5746 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5747 uint32_t m = 0;
5748 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5749 if (streamsArray.stream_request[k].streamID ==
5750 mBatchedStreamsArray.stream_request[m].streamID)
5751 break;
5752 }
5753 if (m == mBatchedStreamsArray.num_streams) {
5754 mBatchedStreamsArray.stream_request\
5755 [mBatchedStreamsArray.num_streams].streamID =
5756 streamsArray.stream_request[k].streamID;
5757 mBatchedStreamsArray.stream_request\
5758 [mBatchedStreamsArray.num_streams].buf_index =
5759 streamsArray.stream_request[k].buf_index;
5760 mBatchedStreamsArray.num_streams =
5761 mBatchedStreamsArray.num_streams + 1;
5762 }
5763 }
5764 streamsArray = mBatchedStreamsArray;
5765 }
5766 /* Update stream id of all the requested buffers */
5767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5768 streamsArray)) {
5769 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005770 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005771 return BAD_VALUE;
5772 }
5773
5774 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5775 mParameters);
5776 if (rc < 0) {
5777 LOGE("set_parms failed");
5778 }
5779 /* reset to zero because the batch is queued */
5780 mToBeQueuedVidBufs = 0;
5781 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5782 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5783 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005784 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5785 uint32_t m = 0;
5786 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5787 if (streamsArray.stream_request[k].streamID ==
5788 mBatchedStreamsArray.stream_request[m].streamID)
5789 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005790 }
5791 if (m == mBatchedStreamsArray.num_streams) {
5792 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5793 streamID = streamsArray.stream_request[k].streamID;
5794 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5795 buf_index = streamsArray.stream_request[k].buf_index;
5796 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5797 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005798 }
5799 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005800 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005801
5802 // Start all streams after the first setting is sent, so that the
5803 // setting can be applied sooner: (0 + apply_delay)th frame.
5804 if (mState == CONFIGURED && mChannelHandle) {
5805 //Then start them.
5806 LOGH("Start META Channel");
5807 rc = mMetadataChannel->start();
5808 if (rc < 0) {
5809 LOGE("META channel start failed");
5810 pthread_mutex_unlock(&mMutex);
5811 return rc;
5812 }
5813
5814 if (mAnalysisChannel) {
5815 rc = mAnalysisChannel->start();
5816 if (rc < 0) {
5817 LOGE("Analysis channel start failed");
5818 mMetadataChannel->stop();
5819 pthread_mutex_unlock(&mMutex);
5820 return rc;
5821 }
5822 }
5823
5824 if (mSupportChannel) {
5825 rc = mSupportChannel->start();
5826 if (rc < 0) {
5827 LOGE("Support channel start failed");
5828 mMetadataChannel->stop();
5829 /* Although support and analysis are mutually exclusive today
5830 adding it in any case for future proofing */
5831 if (mAnalysisChannel) {
5832 mAnalysisChannel->stop();
5833 }
5834 pthread_mutex_unlock(&mMutex);
5835 return rc;
5836 }
5837 }
5838 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5839 it != mStreamInfo.end(); it++) {
5840 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5841 LOGH("Start Processing Channel mask=%d",
5842 channel->getStreamTypeMask());
5843 rc = channel->start();
5844 if (rc < 0) {
5845 LOGE("channel start failed");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 }
5850
5851 if (mRawDumpChannel) {
5852 LOGD("Starting raw dump stream");
5853 rc = mRawDumpChannel->start();
5854 if (rc != NO_ERROR) {
5855 LOGE("Error Starting Raw Dump Channel");
5856 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5857 it != mStreamInfo.end(); it++) {
5858 QCamera3Channel *channel =
5859 (QCamera3Channel *)(*it)->stream->priv;
5860 LOGH("Stopping Processing Channel mask=%d",
5861 channel->getStreamTypeMask());
5862 channel->stop();
5863 }
5864 if (mSupportChannel)
5865 mSupportChannel->stop();
5866 if (mAnalysisChannel) {
5867 mAnalysisChannel->stop();
5868 }
5869 mMetadataChannel->stop();
5870 pthread_mutex_unlock(&mMutex);
5871 return rc;
5872 }
5873 }
5874
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005875 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005876 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005877 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005878 if (rc != NO_ERROR) {
5879 LOGE("start_channel failed %d", rc);
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005883
5884 {
5885 // Configure Easel for stream on.
5886 Mutex::Autolock l(gHdrPlusClientLock);
5887 if (EaselManagerClientOpened) {
5888 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chena6c99062017-05-23 13:45:06 -07005889 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
5890 /*enableIpu*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005891 if (rc != OK) {
5892 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5893 mCameraId, mSensorModeInfo.op_pixel_clk);
5894 pthread_mutex_unlock(&mMutex);
5895 return rc;
5896 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005897 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005898 }
5899 }
5900
5901 // Start sensor streaming.
5902 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5903 mChannelHandle);
5904 if (rc != NO_ERROR) {
5905 LOGE("start_sensor_stream_on failed %d", rc);
5906 pthread_mutex_unlock(&mMutex);
5907 return rc;
5908 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005910 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005911 }
5912
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005913 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen3b630e52017-06-02 15:39:47 -07005914 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005915 Mutex::Autolock l(gHdrPlusClientLock);
5916 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5917 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5918 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5919 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5920 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5921 rc = enableHdrPlusModeLocked();
5922 if (rc != OK) {
5923 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5924 pthread_mutex_unlock(&mMutex);
5925 return rc;
5926 }
5927
5928 mFirstPreviewIntentSeen = true;
5929 }
5930 }
5931
Thierry Strudel3d639192016-09-09 11:52:26 -07005932 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5933
5934 mState = STARTED;
5935 // Block (with a timed wait) until the number of in-flight requests drops below the limit
5936 struct timespec ts;
5937 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005938 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005939 if (rc < 0) {
5940 isValidTimeout = 0;
5941 LOGE("Error reading the real time clock!!");
5942 }
5943 else {
5944 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005945 int64_t timeout = 5;
5946 {
5947 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5948 // If there is a pending HDR+ request, the following requests may be blocked until the
5949 // HDR+ request is done. So allow a longer timeout.
5950 if (mHdrPlusPendingRequests.size() > 0) {
5951 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5952 }
5953 }
5954 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005955 }
5956 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005957 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005958 (mState != ERROR) && (mState != DEINIT)) {
5959 if (!isValidTimeout) {
5960 LOGD("Blocking on conditional wait");
5961 pthread_cond_wait(&mRequestCond, &mMutex);
5962 }
5963 else {
5964 LOGD("Blocking on timed conditional wait");
5965 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5966 if (rc == ETIMEDOUT) {
5967 rc = -ENODEV;
5968 LOGE("Unblocked on timeout!!!!");
5969 break;
5970 }
5971 }
5972 LOGD("Unblocked");
5973 if (mWokenUpByDaemon) {
5974 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005975 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005976 break;
5977 }
5978 }
5979 pthread_mutex_unlock(&mMutex);
5980
5981 return rc;
5982}
5983
5984/*===========================================================================
5985 * FUNCTION : dump
5986 *
5987 * DESCRIPTION:
5988 *
5989 * PARAMETERS :
5990 *
5991 *
5992 * RETURN :
5993 *==========================================================================*/
5994void QCamera3HardwareInterface::dump(int fd)
5995{
5996 pthread_mutex_lock(&mMutex);
5997 dprintf(fd, "\n Camera HAL3 information Begin \n");
5998
5999 dprintf(fd, "\nNumber of pending requests: %zu \n",
6000 mPendingRequestsList.size());
6001 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6002 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6003 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6004 for(pendingRequestIterator i = mPendingRequestsList.begin();
6005 i != mPendingRequestsList.end(); i++) {
6006 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6007 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6008 i->input_buffer);
6009 }
6010 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6011 mPendingBuffersMap.get_num_overall_buffers());
6012 dprintf(fd, "-------+------------------\n");
6013 dprintf(fd, " Frame | Stream type mask \n");
6014 dprintf(fd, "-------+------------------\n");
6015 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6016 for(auto &j : req.mPendingBufferList) {
6017 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6018 dprintf(fd, " %5d | %11d \n",
6019 req.frame_number, channel->getStreamTypeMask());
6020 }
6021 }
6022 dprintf(fd, "-------+------------------\n");
6023
6024 dprintf(fd, "\nPending frame drop list: %zu\n",
6025 mPendingFrameDropList.size());
6026 dprintf(fd, "-------+-----------\n");
6027 dprintf(fd, " Frame | Stream ID \n");
6028 dprintf(fd, "-------+-----------\n");
6029 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6030 i != mPendingFrameDropList.end(); i++) {
6031 dprintf(fd, " %5d | %9d \n",
6032 i->frame_number, i->stream_ID);
6033 }
6034 dprintf(fd, "-------+-----------\n");
6035
6036 dprintf(fd, "\n Camera HAL3 information End \n");
6037
6038 /* use dumpsys media.camera as trigger to send update debug level event */
6039 mUpdateDebugLevel = true;
6040 pthread_mutex_unlock(&mMutex);
6041 return;
6042}
6043
6044/*===========================================================================
6045 * FUNCTION : flush
6046 *
6047 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6048 * conditionally restarts channels
6049 *
6050 * PARAMETERS :
6051 * @ restartChannels: re-start all channels
6052 *
6053 *
6054 * RETURN :
6055 * 0 on success
6056 * Error code on failure
6057 *==========================================================================*/
6058int QCamera3HardwareInterface::flush(bool restartChannels)
6059{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006060 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006061 int32_t rc = NO_ERROR;
6062
6063 LOGD("Unblocking Process Capture Request");
6064 pthread_mutex_lock(&mMutex);
6065 mFlush = true;
6066 pthread_mutex_unlock(&mMutex);
6067
6068 rc = stopAllChannels();
6069 // unlink of dualcam
6070 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006071 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6072 &m_pDualCamCmdPtr->bundle_info;
6073 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006074 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6075 pthread_mutex_lock(&gCamLock);
6076
6077 if (mIsMainCamera == 1) {
6078 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6079 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006080 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006081 // related session id should be session id of linked session
6082 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6083 } else {
6084 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6085 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006086 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006087 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6088 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006089 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006090 pthread_mutex_unlock(&gCamLock);
6091
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006092 rc = mCameraHandle->ops->set_dual_cam_cmd(
6093 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006094 if (rc < 0) {
6095 LOGE("Dualcam: Unlink failed, but still proceed to close");
6096 }
6097 }
6098
6099 if (rc < 0) {
6100 LOGE("stopAllChannels failed");
6101 return rc;
6102 }
6103 if (mChannelHandle) {
6104 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6105 mChannelHandle);
6106 }
6107
6108 // Reset bundle info
6109 rc = setBundleInfo();
6110 if (rc < 0) {
6111 LOGE("setBundleInfo failed %d", rc);
6112 return rc;
6113 }
6114
6115 // Mutex Lock
6116 pthread_mutex_lock(&mMutex);
6117
6118 // Unblock process_capture_request
6119 mPendingLiveRequest = 0;
6120 pthread_cond_signal(&mRequestCond);
6121
6122 rc = notifyErrorForPendingRequests();
6123 if (rc < 0) {
6124 LOGE("notifyErrorForPendingRequests failed");
6125 pthread_mutex_unlock(&mMutex);
6126 return rc;
6127 }
6128
6129 mFlush = false;
6130
6131 // Start the Streams/Channels
6132 if (restartChannels) {
6133 rc = startAllChannels();
6134 if (rc < 0) {
6135 LOGE("startAllChannels failed");
6136 pthread_mutex_unlock(&mMutex);
6137 return rc;
6138 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006139 if (mChannelHandle) {
6140 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006141 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006142 if (rc < 0) {
6143 LOGE("start_channel failed");
6144 pthread_mutex_unlock(&mMutex);
6145 return rc;
6146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006147 }
6148 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006149 pthread_mutex_unlock(&mMutex);
6150
6151 return 0;
6152}
6153
6154/*===========================================================================
6155 * FUNCTION : flushPerf
6156 *
6157 * DESCRIPTION: This is the performance optimization version of flush that does
6158 * not use stream off; instead it flushes the backend and waits for pending buffers to return
6159 *
6160 * PARAMETERS :
6161 *
6162 *
6163 * RETURN : 0 : success
6164 * -EINVAL: input is malformed (device is not valid)
6165 * -ENODEV: if the device has encountered a serious error
6166 *==========================================================================*/
6167int QCamera3HardwareInterface::flushPerf()
6168{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006169 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006170 int32_t rc = 0;
6171 struct timespec timeout;
6172 bool timed_wait = false;
6173
6174 pthread_mutex_lock(&mMutex);
6175 mFlushPerf = true;
6176 mPendingBuffersMap.numPendingBufsAtFlush =
6177 mPendingBuffersMap.get_num_overall_buffers();
6178 LOGD("Calling flush. Wait for %d buffers to return",
6179 mPendingBuffersMap.numPendingBufsAtFlush);
6180
6181 /* send the flush event to the backend */
6182 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6183 if (rc < 0) {
6184 LOGE("Error in flush: IOCTL failure");
6185 mFlushPerf = false;
6186 pthread_mutex_unlock(&mMutex);
6187 return -ENODEV;
6188 }
6189
6190 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6191 LOGD("No pending buffers in HAL, return flush");
6192 mFlushPerf = false;
6193 pthread_mutex_unlock(&mMutex);
6194 return rc;
6195 }
6196
6197 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006198 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006199 if (rc < 0) {
6200 LOGE("Error reading the real time clock, cannot use timed wait");
6201 } else {
6202 timeout.tv_sec += FLUSH_TIMEOUT;
6203 timed_wait = true;
6204 }
6205
6206 //Block on conditional variable
6207 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6208 LOGD("Waiting on mBuffersCond");
6209 if (!timed_wait) {
6210 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6211 if (rc != 0) {
6212 LOGE("pthread_cond_wait failed due to rc = %s",
6213 strerror(rc));
6214 break;
6215 }
6216 } else {
6217 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6218 if (rc != 0) {
6219 LOGE("pthread_cond_timedwait failed due to rc = %s",
6220 strerror(rc));
6221 break;
6222 }
6223 }
6224 }
6225 if (rc != 0) {
6226 mFlushPerf = false;
6227 pthread_mutex_unlock(&mMutex);
6228 return -ENODEV;
6229 }
6230
6231 LOGD("Received buffers, now safe to return them");
6232
6233 //make sure the channels handle flush
6234 //currently only required for the picture channel to release snapshot resources
6235 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6236 it != mStreamInfo.end(); it++) {
6237 QCamera3Channel *channel = (*it)->channel;
6238 if (channel) {
6239 rc = channel->flush();
6240 if (rc) {
6241 LOGE("Flushing the channels failed with error %d", rc);
6242 // even though the channel flush failed we need to continue and
6243 // return the buffers we have to the framework, however the return
6244 // value will be an error
6245 rc = -ENODEV;
6246 }
6247 }
6248 }
6249
6250 /* notify the frameworks and send errored results */
6251 rc = notifyErrorForPendingRequests();
6252 if (rc < 0) {
6253 LOGE("notifyErrorForPendingRequests failed");
6254 pthread_mutex_unlock(&mMutex);
6255 return rc;
6256 }
6257
6258 //unblock process_capture_request
6259 mPendingLiveRequest = 0;
6260 unblockRequestIfNecessary();
6261
6262 mFlushPerf = false;
6263 pthread_mutex_unlock(&mMutex);
6264 LOGD ("Flush Operation complete. rc = %d", rc);
6265 return rc;
6266}
6267
6268/*===========================================================================
6269 * FUNCTION : handleCameraDeviceError
6270 *
6271 * DESCRIPTION: This function calls internal flush and notifies the error to
6272 * framework and updates the state variable.
6273 *
6274 * PARAMETERS : None
6275 *
6276 * RETURN : NO_ERROR on Success
6277 * Error code on failure
6278 *==========================================================================*/
6279int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6280{
6281 int32_t rc = NO_ERROR;
6282
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006283 {
6284 Mutex::Autolock lock(mFlushLock);
6285 pthread_mutex_lock(&mMutex);
6286 if (mState != ERROR) {
6287 //if mState != ERROR, nothing to be done
6288 pthread_mutex_unlock(&mMutex);
6289 return NO_ERROR;
6290 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006291 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006292
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006293 rc = flush(false /* restart channels */);
6294 if (NO_ERROR != rc) {
6295 LOGE("internal flush to handle mState = ERROR failed");
6296 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006297
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006298 pthread_mutex_lock(&mMutex);
6299 mState = DEINIT;
6300 pthread_mutex_unlock(&mMutex);
6301 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006302
6303 camera3_notify_msg_t notify_msg;
6304 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6305 notify_msg.type = CAMERA3_MSG_ERROR;
6306 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6307 notify_msg.message.error.error_stream = NULL;
6308 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006309 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006310
6311 return rc;
6312}
6313
6314/*===========================================================================
6315 * FUNCTION : captureResultCb
6316 *
6317 * DESCRIPTION: Callback handler for all capture result
6318 * (streams, as well as metadata)
6319 *
6320 * PARAMETERS :
6321 * @metadata : metadata information
6322 * @buffer : actual gralloc buffer to be returned to frameworks.
6323 * NULL if metadata.
6324 *
6325 * RETURN : NONE
6326 *==========================================================================*/
6327void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6328 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6329{
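// Three result paths: metadata buffers (batched or single), input-buffer completions,
// and output stream buffers.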
6330 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006331 pthread_mutex_lock(&mMutex);
6332 uint8_t batchSize = mBatchSize;
6333 pthread_mutex_unlock(&mMutex);
6334 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006335 handleBatchMetadata(metadata_buf,
6336 true /* free_and_bufdone_meta_buf */);
6337 } else { /* mBatchSize = 0 */
6338 hdrPlusPerfLock(metadata_buf);
6339 pthread_mutex_lock(&mMutex);
6340 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006341 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006342 true /* last urgent frame of batch metadata */,
6343 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006344 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006345 pthread_mutex_unlock(&mMutex);
6346 }
6347 } else if (isInputBuffer) {
6348 pthread_mutex_lock(&mMutex);
6349 handleInputBufferWithLock(frame_number);
6350 pthread_mutex_unlock(&mMutex);
6351 } else {
6352 pthread_mutex_lock(&mMutex);
6353 handleBufferWithLock(buffer, frame_number);
6354 pthread_mutex_unlock(&mMutex);
6355 }
6356 return;
6357}
6358
6359/*===========================================================================
6360 * FUNCTION : getReprocessibleOutputStreamId
6361 *
6362 * DESCRIPTION: Get source output stream id for the input reprocess stream
6363 * based on size and format, i.e. the output stream whose size and
6364 * format match the input stream, if an input stream exists.
6365 *
6366 * PARAMETERS :
6367 * @id : return the stream id if found
6368 *
6369 * RETURN : int32_t type of status
6370 * NO_ERROR -- success
6371 * non-zero failure code
6372 *==========================================================================*/
6373int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6374{
6375 /* check if any output or bidirectional stream with the same size and format
6376 and return that stream */
6377 if ((mInputStreamInfo.dim.width > 0) &&
6378 (mInputStreamInfo.dim.height > 0)) {
6379 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6380 it != mStreamInfo.end(); it++) {
6381
6382 camera3_stream_t *stream = (*it)->stream;
6383 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6384 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6385 (stream->format == mInputStreamInfo.format)) {
6386 // Usage flag for an input stream and the source output stream
6387 // may be different.
6388 LOGD("Found reprocessible output stream! %p", *it);
6389 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6390 stream->usage, mInputStreamInfo.usage);
6391
6392 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6393 if (channel != NULL && channel->mStreams[0]) {
6394 id = channel->mStreams[0]->getMyServerID();
6395 return NO_ERROR;
6396 }
6397 }
6398 }
6399 } else {
6400 LOGD("No input stream, so no reprocessible output stream");
6401 }
6402 return NAME_NOT_FOUND;
6403}
6404
6405/*===========================================================================
6406 * FUNCTION : lookupFwkName
6407 *
6408 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6409 * make sure the parameter is correctly propagated
6410 *
6411 * PARAMETERS :
6412 * @arr : map between the two enums
6413 * @len : len of the map
6414 * @hal_name : name of the hal_parm to map
6415 *
6416 * RETURN : int type of status
6417 * fwk_name -- success
6418 * non-zero failure code
6419 *==========================================================================*/
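// Illustrative usage (a sketch only; it assumes one of the QCameraMap tables defined
// elsewhere in this file, e.g. EFFECT_MODES_MAP, and the METADATA_MAP_SIZE helper):
//   int fwk_effect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//   // fwk_effect is the matching ANDROID_CONTROL_EFFECT_MODE_* value, or NAME_NOT_FOUND.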
6420template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6421 size_t len, halType hal_name)
6422{
6423
6424 for (size_t i = 0; i < len; i++) {
6425 if (arr[i].hal_name == hal_name) {
6426 return arr[i].fwk_name;
6427 }
6428 }
6429
6430 /* Failing to find a matching framework type is not necessarily
6431 * an error case. This happens when mm-camera supports more attributes
6432 * than the frameworks do */
6433 LOGH("Cannot find matching framework type");
6434 return NAME_NOT_FOUND;
6435}
6436
6437/*===========================================================================
6438 * FUNCTION : lookupHalName
6439 *
6440 * DESCRIPTION: In case the enum is not the same in the fwk and backend,
6441 * make sure the parameter is correctly propagated
6442 *
6443 * PARAMETERS :
6444 * @arr : map between the two enums
6445 * @len : len of the map
6446 * @fwk_name : framework enum value to map
6447 *
6448 * RETURN : int32_t type of status
6449 * hal_name -- success
6450 * non-zero failure code
6451 *==========================================================================*/
6452template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6453 size_t len, fwkType fwk_name)
6454{
6455 for (size_t i = 0; i < len; i++) {
6456 if (arr[i].fwk_name == fwk_name) {
6457 return arr[i].hal_name;
6458 }
6459 }
6460
6461 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6462 return NAME_NOT_FOUND;
6463}
6464
6465/*===========================================================================
6466 * FUNCTION : lookupProp
6467 *
6468 * DESCRIPTION: lookup a value by its name
6469 *
6470 * PARAMETERS :
6471 * @arr : map between the two enums
6472 * @len : size of the map
6473 * @name : name to be looked up
6474 *
6475 * RETURN : Value if found
6476 * CAM_CDS_MODE_MAX if not found
6477 *==========================================================================*/
6478template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6479 size_t len, const char *name)
6480{
6481 if (name) {
6482 for (size_t i = 0; i < len; i++) {
6483 if (!strcmp(arr[i].desc, name)) {
6484 return arr[i].val;
6485 }
6486 }
6487 }
6488 return CAM_CDS_MODE_MAX;
6489}
6490
6491/*===========================================================================
6492 * FUNCTION   : translateFromHalMetadata
 *
6493 * DESCRIPTION:
6494 *
6495 * PARAMETERS :
6496 * @metadata : metadata information from callback
6497 * @timestamp: metadata buffer timestamp
6498 * @request_id: request id
6499 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006500 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006501 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6502 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006503 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006504 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6505 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006506 *
6507 * RETURN : camera_metadata_t*
6508 * metadata in a format specified by fwk
6509 *==========================================================================*/
6510camera_metadata_t*
6511QCamera3HardwareInterface::translateFromHalMetadata(
6512 metadata_buffer_t *metadata,
6513 nsecs_t timestamp,
6514 int32_t request_id,
6515 const CameraMetadata& jpegMetadata,
6516 uint8_t pipeline_depth,
6517 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006518 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006519 /* DevCamDebug metadata translateFromHalMetadata argument */
6520 uint8_t DevCamDebug_meta_enable,
6521 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006522 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006523 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006524 bool lastMetadataInBatch,
6525 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006526{
6527 CameraMetadata camMetadata;
6528 camera_metadata_t *resultMetadata;
6529
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006530 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006531 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6532 * Timestamp is needed because it's used for shutter notify calculation.
6533 * */
6534 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6535 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006536 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006537 }
6538
Thierry Strudel3d639192016-09-09 11:52:26 -07006539 if (jpegMetadata.entryCount())
6540 camMetadata.append(jpegMetadata);
6541
6542 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6543 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6544 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6545 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006546 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006547 if (mBatchSize == 0) {
6548 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6549 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6550 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006551
Samuel Ha68ba5172016-12-15 18:41:12 -08006552 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6553 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6554 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6555 // DevCamDebug metadata translateFromHalMetadata AF
6556 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6557 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6558 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6559 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6560 }
6561 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6562 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6563 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6564 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6565 }
6566 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6567 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6568 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6569 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6570 }
6571 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6572 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6573 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6574 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6575 }
6576 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6577 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6578 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6579 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6580 }
6581 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6582 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6583 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6584 *DevCamDebug_af_monitor_pdaf_target_pos;
6585 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6586 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6587 }
6588 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6589 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6590 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6591 *DevCamDebug_af_monitor_pdaf_confidence;
6592 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6593 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6594 }
6595 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6596 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6597 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6598 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6599 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6600 }
6601 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6602 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6603 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6604 *DevCamDebug_af_monitor_tof_target_pos;
6605 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6606 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6607 }
6608 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6609 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6610 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6611 *DevCamDebug_af_monitor_tof_confidence;
6612 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6613 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6616 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6617 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6618 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6619 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6622 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6623 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6624 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6625 &fwk_DevCamDebug_af_monitor_type_select, 1);
6626 }
6627 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6628 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6629 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6630 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6631 &fwk_DevCamDebug_af_monitor_refocus, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6634 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6635 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6636 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6637 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6638 }
6639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6640 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6641 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6642 *DevCamDebug_af_search_pdaf_target_pos;
6643 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6644 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6647 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6648 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6649 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6650 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6653 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6654 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6655 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6656 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6657 }
6658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6659 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6660 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6661 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6662 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6665 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6666 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6667 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6668 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6669 }
6670 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6671 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6672 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6673 *DevCamDebug_af_search_tof_target_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6675 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6678 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6679 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6680 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6681 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6682 }
6683 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6684 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6685 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6686 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6687 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6690 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6691 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6692 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6693 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6696 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6697 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6698 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6699 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6700 }
6701 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6702 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6703 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6704 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6705 &fwk_DevCamDebug_af_search_type_select, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6708 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6709 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6710 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6711 &fwk_DevCamDebug_af_search_next_pos, 1);
6712 }
6713 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6714 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6715 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6716 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6717 &fwk_DevCamDebug_af_search_target_pos, 1);
6718 }
6719 // DevCamDebug metadata translateFromHalMetadata AEC
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6721 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6722 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6723 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6724 }
6725 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6726 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6727 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6728 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6731 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6732 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6733 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6734 }
6735 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6736 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6737 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6738 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6739 }
6740 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6741 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6742 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6743 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6744 }
6745 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6746 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6747 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6748 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6751 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6752 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6753 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6754 }
6755 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6756 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6757 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6758 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6759 }
Samuel Ha34229982017-02-17 13:51:11 -08006760 // DevCamDebug metadata translateFromHalMetadata zzHDR
6761 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6762 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6763 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6764 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6765 }
6766 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6767 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006768 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006769 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6770 }
6771 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6772 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6773 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6774 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6775 }
6776 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6777 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006778 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006779 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6780 }
6781 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6782 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6783 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6784 *DevCamDebug_aec_hdr_sensitivity_ratio;
6785 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6786 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6787 }
6788 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6789 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6790 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6791 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6792 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6793 }
6794 // DevCamDebug metadata translateFromHalMetadata ADRC
6795 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6796 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6797 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6798 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6799 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6800 }
6801 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6802 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6803 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6804 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6805 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6806 }
6807 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6808 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6809 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6810 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6811 }
6812 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6813 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6814 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6815 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6816 }
6817 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6818 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6819 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6820 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6821 }
6822 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6823 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6824 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6825 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6826 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006827 // DevCamDebug metadata translateFromHalMetadata AWB
6828 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6829 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6830 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6831 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6832 }
6833 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6834 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6835 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6836 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6837 }
6838 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6839 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6840 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6841 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6842 }
6843 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6844 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6845 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6846 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6847 }
6848 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6849 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6850 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6851 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6852 }
6853 }
6854 // atrace_end(ATRACE_TAG_ALWAYS);
6855
Thierry Strudel3d639192016-09-09 11:52:26 -07006856 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6857 int64_t fwk_frame_number = *frame_number;
6858 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6859 }
6860
6861 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6862 int32_t fps_range[2];
6863 fps_range[0] = (int32_t)float_range->min_fps;
6864 fps_range[1] = (int32_t)float_range->max_fps;
6865 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6866 fps_range, 2);
6867 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6868 fps_range[0], fps_range[1]);
6869 }
6870
6871 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6872 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6873 }
6874
6875 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
        int val = lookupFwkName(SCENE_MODES_MAP,
6877 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6878 *sceneMode);
6879 if (NAME_NOT_FOUND != val) {
6880 uint8_t fwkSceneMode = (uint8_t)val;
6881 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6882 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6883 fwkSceneMode);
6884 }
6885 }
6886
6887 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6888 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6889 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6890 }
6891
6892 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6893 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6894 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6895 }
6896
6897 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6898 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6899 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6900 }
6901
6902 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6903 CAM_INTF_META_EDGE_MODE, metadata) {
6904 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6905 }
6906
6907 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6908 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6909 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6910 }
6911
6912 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6913 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6914 }
6915
6916 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6917 if (0 <= *flashState) {
6918 uint8_t fwk_flashState = (uint8_t) *flashState;
6919 if (!gCamCapability[mCameraId]->flash_available) {
6920 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6921 }
6922 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6923 }
6924 }
6925
6926 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6927 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6928 if (NAME_NOT_FOUND != val) {
6929 uint8_t fwk_flashMode = (uint8_t)val;
6930 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6931 }
6932 }
6933
6934 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6935 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6936 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6937 }
6938
6939 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6940 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6941 }
6942
6943 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6944 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6945 }
6946
6947 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6948 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6949 }
6950
6951 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6952 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6953 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6954 }
6955
6956 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6957 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6958 LOGD("fwk_videoStab = %d", fwk_videoStab);
6959 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6960 } else {
        // Regardless of whether video stabilization is supported, CTS expects the EIS result
        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6963 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6964 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006965 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006966 }
6967
6968 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6969 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6970 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6971 }
6972
6973 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6974 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6975 }
6976
Thierry Strudel3d639192016-09-09 11:52:26 -07006977 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6978 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006979 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006980
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006981 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6982 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006983
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006984 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006985 blackLevelAppliedPattern->cam_black_level[0],
6986 blackLevelAppliedPattern->cam_black_level[1],
6987 blackLevelAppliedPattern->cam_black_level[2],
6988 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006989 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6990 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006991
6992#ifndef USE_HAL_3_3
6993 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
        // Need to convert from the internal 14-bit pipeline depth to the sensor's
        // 10-bit raw depth space.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306996 fwk_blackLevelInd[0] /= 16.0;
6997 fwk_blackLevelInd[1] /= 16.0;
6998 fwk_blackLevelInd[2] /= 16.0;
6999 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007000 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7001 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007002#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007003 }
7004
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007005#ifndef USE_HAL_3_3
7006 // Fixed whitelevel is used by ISP/Sensor
7007 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7008 &gCamCapability[mCameraId]->white_level, 1);
7009#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007010
7011 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7012 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7013 int32_t scalerCropRegion[4];
7014 scalerCropRegion[0] = hScalerCropRegion->left;
7015 scalerCropRegion[1] = hScalerCropRegion->top;
7016 scalerCropRegion[2] = hScalerCropRegion->width;
7017 scalerCropRegion[3] = hScalerCropRegion->height;
7018
7019 // Adjust crop region from sensor output coordinate system to active
7020 // array coordinate system.
7021 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7022 scalerCropRegion[2], scalerCropRegion[3]);
7023
7024 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7025 }
7026
7027 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7028 LOGD("sensorExpTime = %lld", *sensorExpTime);
7029 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7030 }
7031
    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
    }
7037
7038 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7039 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7040 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7041 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7042 sensorRollingShutterSkew, 1);
7043 }
7044
7045 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7046 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7047 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7048
7049 //calculate the noise profile based on sensitivity
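        // ANDROID_SENSOR_NOISE_PROFILE models pixel noise variance as S * signal + O;
        // the same (S, O) pair derived from the current sensitivity is repeated for
        // every color channel.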
7050 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7051 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7052 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7053 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7054 noise_profile[i] = noise_profile_S;
7055 noise_profile[i+1] = noise_profile_O;
7056 }
7057 LOGD("noise model entry (S, O) is (%f, %f)",
7058 noise_profile_S, noise_profile_O);
7059 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7060 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7061 }
7062
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007063#ifndef USE_HAL_3_3
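    // POST_RAW_SENSITIVITY_BOOST: start from the ISP sensitivity (ISO units, defaulting to 100)
    // and scale it by any sensitivity applied after the stats stage.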
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007064 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007065 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007066 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007067 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007068 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7069 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7070 }
7071 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007072#endif
7073
Thierry Strudel3d639192016-09-09 11:52:26 -07007074 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7075 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7076 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7077 }
7078
7079 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7080 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7081 *faceDetectMode);
7082 if (NAME_NOT_FOUND != val) {
7083 uint8_t fwk_faceDetectMode = (uint8_t)val;
7084 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7085
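            // When face detection is on, map each face ROI from the sensor output coordinate
            // system to the active array coordinate system before reporting scores, rectangles
            // and (in FULL mode) landmarks.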
7086 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7087 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7088 CAM_INTF_META_FACE_DETECTION, metadata) {
7089 uint8_t numFaces = MIN(
7090 faceDetectionInfo->num_faces_detected, MAX_ROI);
7091 int32_t faceIds[MAX_ROI];
7092 uint8_t faceScores[MAX_ROI];
7093 int32_t faceRectangles[MAX_ROI * 4];
7094 int32_t faceLandmarks[MAX_ROI * 6];
7095 size_t j = 0, k = 0;
7096
7097 for (size_t i = 0; i < numFaces; i++) {
7098 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                    // Adjust the face rectangle from the sensor output coordinate system
                    // to the active array coordinate system.
7101 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7102 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7103 rect.width, rect.height);
7104
7105 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7106 faceRectangles+j, -1);
7107
Jason Lee8ce36fa2017-04-19 19:40:37 -07007108 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7109 "bottom-right (%d, %d)",
7110 faceDetectionInfo->frame_id, i,
7111 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7112 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7113
Thierry Strudel3d639192016-09-09 11:52:26 -07007114 j+= 4;
7115 }
7116 if (numFaces <= 0) {
7117 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7118 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7119 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7120 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7121 }
7122
7123 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7124 numFaces);
7125 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7126 faceRectangles, numFaces * 4U);
7127 if (fwk_faceDetectMode ==
7128 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7129 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7130 CAM_INTF_META_FACE_LANDMARK, metadata) {
7131
7132 for (size_t i = 0; i < numFaces; i++) {
                            // Map the landmark coordinates from the sensor output coordinate
                            // system to the active array coordinate system.
7135 mCropRegionMapper.toActiveArray(
7136 landmarks->face_landmarks[i].left_eye_center.x,
7137 landmarks->face_landmarks[i].left_eye_center.y);
7138 mCropRegionMapper.toActiveArray(
7139 landmarks->face_landmarks[i].right_eye_center.x,
7140 landmarks->face_landmarks[i].right_eye_center.y);
7141 mCropRegionMapper.toActiveArray(
7142 landmarks->face_landmarks[i].mouth_center.x,
7143 landmarks->face_landmarks[i].mouth_center.y);
7144
7145 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007146
7147 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7148 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7149 faceDetectionInfo->frame_id, i,
7150 faceLandmarks[k + LEFT_EYE_X],
7151 faceLandmarks[k + LEFT_EYE_Y],
7152 faceLandmarks[k + RIGHT_EYE_X],
7153 faceLandmarks[k + RIGHT_EYE_Y],
7154 faceLandmarks[k + MOUTH_X],
7155 faceLandmarks[k + MOUTH_Y]);
7156
Thierry Strudel04e026f2016-10-10 11:27:36 -07007157 k+= TOTAL_LANDMARK_INDICES;
7158 }
7159 } else {
7160 for (size_t i = 0; i < numFaces; i++) {
7161 setInvalidLandmarks(faceLandmarks+k);
7162 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007163 }
7164 }
7165
Jason Lee49619db2017-04-13 12:07:22 -07007166 for (size_t i = 0; i < numFaces; i++) {
7167 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7168
7169 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7170 faceDetectionInfo->frame_id, i, faceIds[i]);
7171 }
7172
Thierry Strudel3d639192016-09-09 11:52:26 -07007173 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7174 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7175 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007176 }
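                    // Extended face analytics (blink, smile, gaze) are reported through QTI
                    // vendor tags when the HAL provides them.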
Thierry Strudel54dc9782017-02-15 12:12:10 -08007177 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7178 CAM_INTF_META_FACE_BLINK, metadata) {
7179 uint8_t detected[MAX_ROI];
7180 uint8_t degree[MAX_ROI * 2];
7181 for (size_t i = 0; i < numFaces; i++) {
7182 detected[i] = blinks->blink[i].blink_detected;
7183 degree[2 * i] = blinks->blink[i].left_blink;
7184 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007185
Jason Lee49619db2017-04-13 12:07:22 -07007186 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7187 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7188 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7189 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007190 }
7191 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7192 detected, numFaces);
7193 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7194 degree, numFaces * 2);
7195 }
7196 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7197 CAM_INTF_META_FACE_SMILE, metadata) {
7198 uint8_t degree[MAX_ROI];
7199 uint8_t confidence[MAX_ROI];
7200 for (size_t i = 0; i < numFaces; i++) {
7201 degree[i] = smiles->smile[i].smile_degree;
7202 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007203
Jason Lee49619db2017-04-13 12:07:22 -07007204 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7205 "smile_degree=%d, smile_score=%d",
7206 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007207 }
7208 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7209 degree, numFaces);
7210 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7211 confidence, numFaces);
7212 }
7213 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7214 CAM_INTF_META_FACE_GAZE, metadata) {
7215 int8_t angle[MAX_ROI];
7216 int32_t direction[MAX_ROI * 3];
7217 int8_t degree[MAX_ROI * 2];
7218 for (size_t i = 0; i < numFaces; i++) {
7219 angle[i] = gazes->gaze[i].gaze_angle;
7220 direction[3 * i] = gazes->gaze[i].updown_dir;
7221 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7222 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7223 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7224 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007225
7226 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
                                "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7228 "left_right_gaze=%d, top_bottom_gaze=%d",
7229 faceDetectionInfo->frame_id, i, angle[i],
7230 direction[3 * i], direction[3 * i + 1],
7231 direction[3 * i + 2],
7232 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007233 }
7234 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7235 (uint8_t *)angle, numFaces);
7236 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7237 direction, numFaces * 3);
7238 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7239 (uint8_t *)degree, numFaces * 2);
7240 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007241 }
7242 }
7243 }
7244 }
7245
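    // Histogram stats: publish the mode and bin count via the vendor/experimental tags, and
    // forward the selected Bayer channel (or YUV) histogram buffer when the mode is ON.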
7246 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7247 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007248 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007249 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007250 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007251
Shuzhen Wang14415f52016-11-16 18:26:18 -08007252 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7253 histogramBins = *histBins;
7254 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7255 }
7256
7257 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007258 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7259 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007260 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007261
7262 switch (stats_data->type) {
7263 case CAM_HISTOGRAM_TYPE_BAYER:
7264 switch (stats_data->bayer_stats.data_type) {
7265 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007266 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7267 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007268 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007269 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7270 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007271 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007272 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7273 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007274 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007275 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007276 case CAM_STATS_CHANNEL_R:
7277 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007278 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7279 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007280 }
7281 break;
7282 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007283 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007284 break;
7285 }
7286
Shuzhen Wang14415f52016-11-16 18:26:18 -08007287 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007288 }
7289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007290 }
7291
7292 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7293 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7294 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7295 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7296 }
7297
7298 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7299 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7300 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7301 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7302 }
7303
7304 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7305 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7306 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7307 CAM_MAX_SHADING_MAP_HEIGHT);
7308 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7309 CAM_MAX_SHADING_MAP_WIDTH);
7310 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7311 lensShadingMap->lens_shading, 4U * map_width * map_height);
7312 }
7313
7314 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7315 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7316 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7317 }
7318
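    // Tone-map curves are reported per channel as (input, output) point pairs; counts beyond
    // CAM_MAX_TONEMAP_CURVE_SIZE are clamped before updating the framework tags.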
7319 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7320 //Populate CAM_INTF_META_TONEMAP_CURVES
7321 /* ch0 = G, ch 1 = B, ch 2 = R*/
7322 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7323 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7324 tonemap->tonemap_points_cnt,
7325 CAM_MAX_TONEMAP_CURVE_SIZE);
7326 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7327 }
7328
7329 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7330 &tonemap->curves[0].tonemap_points[0][0],
7331 tonemap->tonemap_points_cnt * 2);
7332
7333 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7334 &tonemap->curves[1].tonemap_points[0][0],
7335 tonemap->tonemap_points_cnt * 2);
7336
7337 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7338 &tonemap->curves[2].tonemap_points[0][0],
7339 tonemap->tonemap_points_cnt * 2);
7340 }
7341
7342 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7343 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7344 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7345 CC_GAIN_MAX);
7346 }
7347
7348 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7349 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7350 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7351 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7352 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7353 }
7354
7355 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7356 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7357 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7358 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7359 toneCurve->tonemap_points_cnt,
7360 CAM_MAX_TONEMAP_CURVE_SIZE);
7361 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7362 }
7363 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7364 (float*)toneCurve->curve.tonemap_points,
7365 toneCurve->tonemap_points_cnt * 2);
7366 }
7367
7368 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7369 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7370 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7371 predColorCorrectionGains->gains, 4);
7372 }
7373
7374 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7375 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7376 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7377 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7378 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7379 }
7380
7381 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7382 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7383 }
7384
7385 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7386 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7387 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7388 }
7389
7390 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7391 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7392 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7393 }
7394
7395 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7396 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7397 *effectMode);
7398 if (NAME_NOT_FOUND != val) {
7399 uint8_t fwk_effectMode = (uint8_t)val;
7400 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7401 }
7402 }
7403
7404 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7405 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7406 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7407 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7408 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7409 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7410 }
7411 int32_t fwk_testPatternData[4];
7412 fwk_testPatternData[0] = testPatternData->r;
7413 fwk_testPatternData[3] = testPatternData->b;
7414 switch (gCamCapability[mCameraId]->color_arrangement) {
7415 case CAM_FILTER_ARRANGEMENT_RGGB:
7416 case CAM_FILTER_ARRANGEMENT_GRBG:
7417 fwk_testPatternData[1] = testPatternData->gr;
7418 fwk_testPatternData[2] = testPatternData->gb;
7419 break;
7420 case CAM_FILTER_ARRANGEMENT_GBRG:
7421 case CAM_FILTER_ARRANGEMENT_BGGR:
7422 fwk_testPatternData[2] = testPatternData->gr;
7423 fwk_testPatternData[1] = testPatternData->gb;
7424 break;
7425 default:
7426 LOGE("color arrangement %d is not supported",
7427 gCamCapability[mCameraId]->color_arrangement);
7428 break;
7429 }
7430 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7431 }
7432
7433 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7434 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7435 }
7436
7437 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7438 String8 str((const char *)gps_methods);
7439 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7440 }
7441
7442 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7443 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7444 }
7445
7446 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7447 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7448 }
7449
7450 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7451 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7452 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7453 }
7454
7455 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7456 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7457 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7458 }
7459
7460 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7461 int32_t fwk_thumb_size[2];
7462 fwk_thumb_size[0] = thumb_size->width;
7463 fwk_thumb_size[1] = thumb_size->height;
7464 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7465 }
7466
7467 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7468 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7469 privateData,
7470 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7471 }
7472
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007473 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007474 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007475 meteringMode, 1);
7476 }
7477
Thierry Strudel54dc9782017-02-15 12:12:10 -08007478 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7479 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7480 LOGD("hdr_scene_data: %d %f\n",
7481 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7482 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7483 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7484 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7485 &isHdr, 1);
7486 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7487 &isHdrConfidence, 1);
7488 }
7489
7490
7491
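    // Tuning data is packed into a flat blob: a header of six uint32 fields (version plus the
    // sensor, VFE, CPP, CAC and mod3 section sizes) followed by the sensor, VFE, CPP and CAC
    // payloads, and published through QCAMERA3_TUNING_META_DATA_BLOB.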
Thierry Strudel3d639192016-09-09 11:52:26 -07007492 if (metadata->is_tuning_params_valid) {
7493 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7494 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7495 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7496
7497
7498 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7499 sizeof(uint32_t));
7500 data += sizeof(uint32_t);
7501
7502 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7503 sizeof(uint32_t));
7504 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7505 data += sizeof(uint32_t);
7506
7507 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7508 sizeof(uint32_t));
7509 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7510 data += sizeof(uint32_t);
7511
7512 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7513 sizeof(uint32_t));
7514 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7515 data += sizeof(uint32_t);
7516
7517 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7518 sizeof(uint32_t));
7519 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7520 data += sizeof(uint32_t);
7521
7522 metadata->tuning_params.tuning_mod3_data_size = 0;
7523 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7524 sizeof(uint32_t));
7525 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7526 data += sizeof(uint32_t);
7527
7528 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7529 TUNING_SENSOR_DATA_MAX);
7530 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7531 count);
7532 data += count;
7533
7534 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7535 TUNING_VFE_DATA_MAX);
7536 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7537 count);
7538 data += count;
7539
7540 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7541 TUNING_CPP_DATA_MAX);
7542 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7543 count);
7544 data += count;
7545
7546 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7547 TUNING_CAC_DATA_MAX);
7548 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7549 count);
7550 data += count;
7551
7552 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7553 (int32_t *)(void *)tuning_meta_data_blob,
7554 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7555 }
7556
7557 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7558 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7559 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7560 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7561 NEUTRAL_COL_POINTS);
7562 }
7563
7564 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7565 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7566 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7567 }
7568
7569 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7570 int32_t aeRegions[REGIONS_TUPLE_COUNT];
        // Adjust the AE region from the sensor output coordinate system to the
        // active array coordinate system.
7573 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7574 hAeRegions->rect.width, hAeRegions->rect.height);
7575
7576 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7577 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7578 REGIONS_TUPLE_COUNT);
7579 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7580 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7581 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7582 hAeRegions->rect.height);
7583 }
7584
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007585 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7586 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7587 if (NAME_NOT_FOUND != val) {
7588 uint8_t fwkAfMode = (uint8_t)val;
7589 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7590 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7591 } else {
7592 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7593 val);
7594 }
7595 }
7596
Thierry Strudel3d639192016-09-09 11:52:26 -07007597 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7598 uint8_t fwk_afState = (uint8_t) *afState;
7599 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007600 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007601 }
7602
7603 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7604 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7605 }
7606
7607 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7608 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7609 }
7610
7611 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7612 uint8_t fwk_lensState = *lensState;
7613 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7614 }
7615
Thierry Strudel3d639192016-09-09 11:52:26 -07007616
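    // The HAL's 50Hz/60Hz auto antibanding sub-modes both map to the framework's single AUTO value.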
7617 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007618 uint32_t ab_mode = *hal_ab_mode;
7619 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7620 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7621 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7622 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007623 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007624 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007625 if (NAME_NOT_FOUND != val) {
7626 uint8_t fwk_ab_mode = (uint8_t)val;
7627 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7628 }
7629 }
7630
7631 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7632 int val = lookupFwkName(SCENE_MODES_MAP,
7633 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7634 if (NAME_NOT_FOUND != val) {
7635 uint8_t fwkBestshotMode = (uint8_t)val;
7636 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7637 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7638 } else {
7639 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7640 }
7641 }
7642
7643 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7644 uint8_t fwk_mode = (uint8_t) *mode;
7645 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7646 }
7647
    /* Constant metadata values to be updated */
7649 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7650 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7651
7652 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7653 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7654
7655 int32_t hotPixelMap[2];
7656 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7657
7658 // CDS
7659 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7660 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7661 }
7662
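    // Sensor (staggered) video HDR: translate the HAL enum to the QCAMERA3_VIDEO_HDR_MODE vendor
    // tag and track ON/OFF transitions in mCurrFeatureState for the profiling logs.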
Thierry Strudel04e026f2016-10-10 11:27:36 -07007663 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7664 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007665 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007666 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7667 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7668 } else {
7669 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7670 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007671
7672 if(fwk_hdr != curr_hdr_state) {
7673 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7674 if(fwk_hdr)
7675 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7676 else
7677 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7678 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007679 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7680 }
7681
Thierry Strudel54dc9782017-02-15 12:12:10 -08007682 //binning correction
7683 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7684 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7685 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7686 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7687 }
7688
Thierry Strudel04e026f2016-10-10 11:27:36 -07007689 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007690 int32_t fwk_ir = (int32_t) *ir;
        int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
        int8_t is_ir_on = 0;

        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7695 if(is_ir_on != curr_ir_state) {
7696 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7697 if(is_ir_on)
7698 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7699 else
7700 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7701 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007702 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007703 }
7704
Thierry Strudel269c81a2016-10-12 12:13:59 -07007705 // AEC SPEED
7706 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7707 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7708 }
7709
7710 // AWB SPEED
7711 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7712 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7713 }
7714
Thierry Strudel3d639192016-09-09 11:52:26 -07007715 // TNR
7716 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7717 uint8_t tnr_enable = tnr->denoise_enable;
7718 int32_t tnr_process_type = (int32_t)tnr->process_plates;
        int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
        int8_t is_tnr_on = 0;

        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7723 if(is_tnr_on != curr_tnr_state) {
7724 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7725 if(is_tnr_on)
7726 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7727 else
7728 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007730
7731 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7732 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7733 }
7734
7735 // Reprocess crop data
7736 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7737 uint8_t cnt = crop_data->num_of_streams;
7738 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
            // mm-qcamera-daemon only posts crop_data for streams
            // not linked to pproc, so the absence of valid crop metadata
            // is not necessarily an error case.
7742 LOGD("No valid crop metadata entries");
7743 } else {
7744 uint32_t reproc_stream_id;
7745 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7746 LOGD("No reprocessible stream found, ignore crop data");
7747 } else {
7748 int rc = NO_ERROR;
7749 Vector<int32_t> roi_map;
7750 int32_t *crop = new int32_t[cnt*4];
7751 if (NULL == crop) {
7752 rc = NO_MEMORY;
7753 }
7754 if (NO_ERROR == rc) {
7755 int32_t streams_found = 0;
7756 for (size_t i = 0; i < cnt; i++) {
7757 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7758 if (pprocDone) {
7759 // HAL already does internal reprocessing,
7760 // either via reprocessing before JPEG encoding,
7761 // or offline postprocessing for pproc bypass case.
7762 crop[0] = 0;
7763 crop[1] = 0;
7764 crop[2] = mInputStreamInfo.dim.width;
7765 crop[3] = mInputStreamInfo.dim.height;
7766 } else {
7767 crop[0] = crop_data->crop_info[i].crop.left;
7768 crop[1] = crop_data->crop_info[i].crop.top;
7769 crop[2] = crop_data->crop_info[i].crop.width;
7770 crop[3] = crop_data->crop_info[i].crop.height;
7771 }
7772 roi_map.add(crop_data->crop_info[i].roi_map.left);
7773 roi_map.add(crop_data->crop_info[i].roi_map.top);
7774 roi_map.add(crop_data->crop_info[i].roi_map.width);
7775 roi_map.add(crop_data->crop_info[i].roi_map.height);
7776 streams_found++;
7777 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7778 crop[0], crop[1], crop[2], crop[3]);
7779 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7780 crop_data->crop_info[i].roi_map.left,
7781 crop_data->crop_info[i].roi_map.top,
7782 crop_data->crop_info[i].roi_map.width,
7783 crop_data->crop_info[i].roi_map.height);
7784 break;
7785
7786 }
7787 }
7788 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7789 &streams_found, 1);
7790 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7791 crop, (size_t)(streams_found * 4));
7792 if (roi_map.array()) {
7793 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7794 roi_map.array(), roi_map.size());
7795 }
7796 }
7797 if (crop) {
7798 delete [] crop;
7799 }
7800 }
7801 }
7802 }
7803
7804 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7805        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7806        // so hardcode the CAC result to OFF mode.
7807 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7808 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7809 } else {
7810 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7811 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7812 *cacMode);
7813 if (NAME_NOT_FOUND != val) {
7814 uint8_t resultCacMode = (uint8_t)val;
7815                // Check whether the CAC result from the callback matches the framework-set CAC mode.
7816                // If not, report the CAC mode that came in the corresponding request.
7817 if (fwk_cacMode != resultCacMode) {
7818 resultCacMode = fwk_cacMode;
7819 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007820 //Check if CAC is disabled by property
7821 if (m_cacModeDisabled) {
7822 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7823 }
7824
Thierry Strudel3d639192016-09-09 11:52:26 -07007825 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7826 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7827 } else {
7828 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7829 }
7830 }
7831 }
7832
7833 // Post blob of cam_cds_data through vendor tag.
7834 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7835 uint8_t cnt = cdsInfo->num_of_streams;
7836 cam_cds_data_t cdsDataOverride;
7837 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7838 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7839 cdsDataOverride.num_of_streams = 1;
7840 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7841 uint32_t reproc_stream_id;
7842 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7843 LOGD("No reprocessible stream found, ignore cds data");
7844 } else {
7845 for (size_t i = 0; i < cnt; i++) {
7846 if (cdsInfo->cds_info[i].stream_id ==
7847 reproc_stream_id) {
7848 cdsDataOverride.cds_info[0].cds_enable =
7849 cdsInfo->cds_info[i].cds_enable;
7850 break;
7851 }
7852 }
7853 }
7854 } else {
7855 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7856 }
7857 camMetadata.update(QCAMERA3_CDS_INFO,
7858 (uint8_t *)&cdsDataOverride,
7859 sizeof(cam_cds_data_t));
7860 }
7861
7862 // Ldaf calibration data
7863 if (!mLdafCalibExist) {
7864 IF_META_AVAILABLE(uint32_t, ldafCalib,
7865 CAM_INTF_META_LDAF_EXIF, metadata) {
7866 mLdafCalibExist = true;
7867 mLdafCalib[0] = ldafCalib[0];
7868 mLdafCalib[1] = ldafCalib[1];
7869 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7870 ldafCalib[0], ldafCalib[1]);
7871 }
7872 }
7873
Thierry Strudel54dc9782017-02-15 12:12:10 -08007874 // EXIF debug data through vendor tag
7875 /*
7876 * Mobicat Mask can assume 3 values:
7877 * 1 refers to Mobicat data,
7878 * 2 refers to Stats Debug and Exif Debug Data
7879 * 3 refers to Mobicat and Stats Debug Data
7880 * We want to make sure that we are sending Exif debug data
7881 * only when Mobicat Mask is 2.
7882 */
7883 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7884 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7885 (uint8_t *)(void *)mExifParams.debug_params,
7886 sizeof(mm_jpeg_debug_exif_params_t));
7887 }
7888
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007889 // Reprocess and DDM debug data through vendor tag
7890 cam_reprocess_info_t repro_info;
7891 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007892 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7893 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007894 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007895 }
7896 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7897 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007898 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007899 }
7900 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7901 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007902 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007903 }
7904 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7905 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007906 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007907 }
7908 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7909 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007910 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007911 }
7912 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007913 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007914 }
7915 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7916 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007917 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007918 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007919 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7920 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7921 }
7922 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7923 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7924 }
7925 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7926 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007927
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007928 // INSTANT AEC MODE
7929 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7930 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7931 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7932 }
7933
Shuzhen Wange763e802016-03-31 10:24:29 -07007934 // AF scene change
7935 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7936 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7937 }
7938
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007939 // Enable ZSL
7940 if (enableZsl != nullptr) {
7941 uint8_t value = *enableZsl ?
7942 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7943 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7944 }
7945
Xu Han821ea9c2017-05-23 09:00:40 -07007946 // OIS Data
7947 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
7948 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
7949 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
7950 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
7951 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
7952 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
7953 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
7954 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
7955 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
7956 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
7957 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
7958 }
7959
Thierry Strudel3d639192016-09-09 11:52:26 -07007960 resultMetadata = camMetadata.release();
7961 return resultMetadata;
7962}
7963
7964/*===========================================================================
7965 * FUNCTION : saveExifParams
7966 *
7967 * DESCRIPTION: save EXIF debug parameters from the metadata callback into mExifParams
7968 *
7969 * PARAMETERS :
7970 * @metadata : metadata information from callback
7971 *
7972 * RETURN : none
7973 *
7974 *==========================================================================*/
7975void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7976{
7977 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7978 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7979 if (mExifParams.debug_params) {
7980 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7981 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7982 }
7983 }
7984 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7985 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7986 if (mExifParams.debug_params) {
7987 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7988 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7989 }
7990 }
7991 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7992 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7993 if (mExifParams.debug_params) {
7994 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7995 mExifParams.debug_params->af_debug_params_valid = TRUE;
7996 }
7997 }
7998 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7999 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8000 if (mExifParams.debug_params) {
8001 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8002 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8003 }
8004 }
8005 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8006 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8007 if (mExifParams.debug_params) {
8008 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8009 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8010 }
8011 }
8012 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8013 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8014 if (mExifParams.debug_params) {
8015 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8016 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8017 }
8018 }
8019 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8020 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8021 if (mExifParams.debug_params) {
8022 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8023 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8024 }
8025 }
8026 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8027 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8028 if (mExifParams.debug_params) {
8029 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8030 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8031 }
8032 }
8033}
8034
8035/*===========================================================================
8036 * FUNCTION : get3AExifParams
8037 *
8038 * DESCRIPTION:
8039 * DESCRIPTION: return the cached 3A EXIF parameters
8040 * PARAMETERS : none
8041 *
8042 *
8043 * RETURN : mm_jpeg_exif_params_t
8044 *
8045 *==========================================================================*/
8046mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8047{
8048 return mExifParams;
8049}
8050
8051/*===========================================================================
8052 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8053 *
8054 * DESCRIPTION:
8055 * DESCRIPTION: translate urgent (partial) metadata from the camera backend into framework result metadata
8056 * PARAMETERS :
8057 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008058 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8059 * urgent metadata in a batch. Always true for
8060 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008061 *
8062 * RETURN : camera_metadata_t*
8063 * metadata in a format specified by fwk
8064 *==========================================================================*/
8065camera_metadata_t*
8066QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008067 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008068{
8069 CameraMetadata camMetadata;
8070 camera_metadata_t *resultMetadata;
8071
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008072 if (!lastUrgentMetadataInBatch) {
8073 /* In batch mode, use empty metadata if this is not the last in batch
8074 */
8075 resultMetadata = allocate_camera_metadata(0, 0);
8076 return resultMetadata;
8077 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008078
8079 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8080 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8081 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8082 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8083 }
8084
8085 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8086 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8087 &aecTrigger->trigger, 1);
8088 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8089 &aecTrigger->trigger_id, 1);
8090 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8091 aecTrigger->trigger);
8092 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8093 aecTrigger->trigger_id);
8094 }
8095
8096 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8097 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8098 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8099 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8100 }
8101
Thierry Strudel3d639192016-09-09 11:52:26 -07008102 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8103 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8104 &af_trigger->trigger, 1);
8105 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8106 af_trigger->trigger);
8107 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8108 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8109 af_trigger->trigger_id);
8110 }
8111
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008112 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8113 /*af regions*/
8114 int32_t afRegions[REGIONS_TUPLE_COUNT];
8115 // Adjust crop region from sensor output coordinate system to active
8116 // array coordinate system.
8117 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8118 hAfRegions->rect.width, hAfRegions->rect.height);
8119
8120 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8121 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8122 REGIONS_TUPLE_COUNT);
8123 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8124 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8125 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8126 hAfRegions->rect.height);
8127 }
8128
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008129 // AF region confidence
8130 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8131 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8132 }
8133
Thierry Strudel3d639192016-09-09 11:52:26 -07008134 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8135 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8136 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8137 if (NAME_NOT_FOUND != val) {
8138 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8139 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8140 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8141 } else {
8142 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8143 }
8144 }
8145
8146 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8147 uint32_t aeMode = CAM_AE_MODE_MAX;
8148 int32_t flashMode = CAM_FLASH_MODE_MAX;
8149 int32_t redeye = -1;
8150 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8151 aeMode = *pAeMode;
8152 }
8153 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8154 flashMode = *pFlashMode;
8155 }
8156 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8157 redeye = *pRedeye;
8158 }
8159
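    // Added note (a descriptive summary of the logic below): ANDROID_CONTROL_AE_MODE is
    // deduced from the backend hints in priority order -- red-eye reduction first, then
    // auto/on flash modes, then plain AE on/off, with the external-flash case mapped to
    // the experimental framework value.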
8160 if (1 == redeye) {
8161 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8162 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8163 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8164 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8165 flashMode);
8166 if (NAME_NOT_FOUND != val) {
8167 fwk_aeMode = (uint8_t)val;
8168 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8169 } else {
8170 LOGE("Unsupported flash mode %d", flashMode);
8171 }
8172 } else if (aeMode == CAM_AE_MODE_ON) {
8173 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8174 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8175 } else if (aeMode == CAM_AE_MODE_OFF) {
8176 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8177 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008178 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8179 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8180 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008181 } else {
8182 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8183 "flashMode:%d, aeMode:%u!!!",
8184 redeye, flashMode, aeMode);
8185 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008186 if (mInstantAEC) {
8187        // Increment frame index count until a bound is reached for instant AEC.
8188 mInstantAecFrameIdxCount++;
8189 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8190 CAM_INTF_META_AEC_INFO, metadata) {
8191 LOGH("ae_params->settled = %d",ae_params->settled);
8192 // If AEC settled, or if number of frames reached bound value,
8193 // should reset instant AEC.
8194 if (ae_params->settled ||
8195 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8196 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8197 mInstantAEC = false;
8198 mResetInstantAEC = true;
8199 mInstantAecFrameIdxCount = 0;
8200 }
8201 }
8202 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008203 resultMetadata = camMetadata.release();
8204 return resultMetadata;
8205}
8206
8207/*===========================================================================
8208 * FUNCTION : dumpMetadataToFile
8209 *
8210 * DESCRIPTION: Dumps tuning metadata to file system
8211 *
8212 * PARAMETERS :
8213 * @meta : tuning metadata
8214 * @dumpFrameCount : current dump frame count
8215 * @enabled : Enable mask
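 *  @type           : dump type string used in the output file name
 *  @frameNumber    : frame number associated with the dumped metadata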
8216 *
8217 *==========================================================================*/
8218void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8219 uint32_t &dumpFrameCount,
8220 bool enabled,
8221 const char *type,
8222 uint32_t frameNumber)
8223{
8224 //Some sanity checks
8225 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8226 LOGE("Tuning sensor data size bigger than expected %d: %d",
8227 meta.tuning_sensor_data_size,
8228 TUNING_SENSOR_DATA_MAX);
8229 return;
8230 }
8231
8232 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8233 LOGE("Tuning VFE data size bigger than expected %d: %d",
8234 meta.tuning_vfe_data_size,
8235 TUNING_VFE_DATA_MAX);
8236 return;
8237 }
8238
8239 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8240 LOGE("Tuning CPP data size bigger than expected %d: %d",
8241 meta.tuning_cpp_data_size,
8242 TUNING_CPP_DATA_MAX);
8243 return;
8244 }
8245
8246 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8247 LOGE("Tuning CAC data size bigger than expected %d: %d",
8248 meta.tuning_cac_data_size,
8249 TUNING_CAC_DATA_MAX);
8250 return;
8251 }
8252 //
8253
8254 if(enabled){
8255 char timeBuf[FILENAME_MAX];
8256 char buf[FILENAME_MAX];
8257 memset(buf, 0, sizeof(buf));
8258 memset(timeBuf, 0, sizeof(timeBuf));
8259 time_t current_time;
8260 struct tm * timeinfo;
8261 time (&current_time);
8262 timeinfo = localtime (&current_time);
8263 if (timeinfo != NULL) {
8264 strftime (timeBuf, sizeof(timeBuf),
8265 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8266 }
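        // Dump path (per the code below): QCAMERA_DUMP_FRM_LOCATION + timestamp prefix,
        // followed by "<dumpFrameCount>m_<type>_<frameNumber>.bin".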
8267 String8 filePath(timeBuf);
8268 snprintf(buf,
8269 sizeof(buf),
8270 "%dm_%s_%d.bin",
8271 dumpFrameCount,
8272 type,
8273 frameNumber);
8274 filePath.append(buf);
8275 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8276 if (file_fd >= 0) {
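            // Dump layout: six uint32 header fields (tuning data version followed by
            // sensor/VFE/CPP/CAC/mod3 section sizes), then the sensor, VFE, CPP and
            // CAC payloads copied from their fixed offsets within meta.data.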
8277 ssize_t written_len = 0;
8278 meta.tuning_data_version = TUNING_DATA_VERSION;
8279 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8280 written_len += write(file_fd, data, sizeof(uint32_t));
8281 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8282 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8283 written_len += write(file_fd, data, sizeof(uint32_t));
8284 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8285 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8286 written_len += write(file_fd, data, sizeof(uint32_t));
8287 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8288 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8289 written_len += write(file_fd, data, sizeof(uint32_t));
8290 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8291 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8292 written_len += write(file_fd, data, sizeof(uint32_t));
8293 meta.tuning_mod3_data_size = 0;
8294 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8295 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8296 written_len += write(file_fd, data, sizeof(uint32_t));
8297 size_t total_size = meta.tuning_sensor_data_size;
8298 data = (void *)((uint8_t *)&meta.data);
8299 written_len += write(file_fd, data, total_size);
8300 total_size = meta.tuning_vfe_data_size;
8301 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8302 written_len += write(file_fd, data, total_size);
8303 total_size = meta.tuning_cpp_data_size;
8304 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8305 written_len += write(file_fd, data, total_size);
8306 total_size = meta.tuning_cac_data_size;
8307 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8308 written_len += write(file_fd, data, total_size);
8309 close(file_fd);
8310 }else {
8311        } else {
8312            LOGE("failed to open file for metadata dumping");
8313 }
8314}
8315
8316/*===========================================================================
8317 * FUNCTION : cleanAndSortStreamInfo
8318 *
8319 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8320 *              and sort them such that raw streams are at the end of the list.
8321 *              This is a workaround for a camera daemon constraint.
8322 *
8323 * PARAMETERS : None
8324 *
8325 *==========================================================================*/
8326void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8327{
8328 List<stream_info_t *> newStreamInfo;
8329
8330 /*clean up invalid streams*/
8331 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8332 it != mStreamInfo.end();) {
8333 if(((*it)->status) == INVALID){
8334 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8335 delete channel;
8336 free(*it);
8337 it = mStreamInfo.erase(it);
8338 } else {
8339 it++;
8340 }
8341 }
8342
8343 // Move preview/video/callback/snapshot streams into newList
8344 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8345 it != mStreamInfo.end();) {
8346 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8347 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8348 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8349 newStreamInfo.push_back(*it);
8350 it = mStreamInfo.erase(it);
8351 } else
8352 it++;
8353 }
8354 // Move raw streams into newList
8355 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8356 it != mStreamInfo.end();) {
8357 newStreamInfo.push_back(*it);
8358 it = mStreamInfo.erase(it);
8359 }
8360
8361 mStreamInfo = newStreamInfo;
8362}
8363
8364/*===========================================================================
8365 * FUNCTION : extractJpegMetadata
8366 *
8367 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8368 *              JPEG metadata is cached in HAL, and returned as part of the capture
8369 * result when metadata is returned from camera daemon.
8370 *
8371 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8372 * @request: capture request
8373 *
8374 *==========================================================================*/
8375void QCamera3HardwareInterface::extractJpegMetadata(
8376 CameraMetadata& jpegMetadata,
8377 const camera3_capture_request_t *request)
8378{
8379 CameraMetadata frame_settings;
8380 frame_settings = request->settings;
8381
8382 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8383 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8384 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8385 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8386
8387 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8388 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8389 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8390 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8391
8392 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8393 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8394 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8395 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8396
8397 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8398 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8399 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8400 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8401
8402 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8403 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8404 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8405 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8406
8407 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8408 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8409 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8410 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8411
8412 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8413 int32_t thumbnail_size[2];
8414 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8415 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8416 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8417 int32_t orientation =
8418 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008419 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008420 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8421 int32_t temp;
8422 temp = thumbnail_size[0];
8423 thumbnail_size[0] = thumbnail_size[1];
8424 thumbnail_size[1] = temp;
8425 }
8426 }
8427 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8428 thumbnail_size,
8429 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8430 }
8431
8432}
8433
8434/*===========================================================================
8435 * FUNCTION : convertToRegions
8436 *
8437 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8438 *
8439 * PARAMETERS :
8440 * @rect : cam_rect_t struct to convert
8441 * @region : int32_t destination array
8442 * @weight : if we are converting from cam_area_t, weight is valid
8443 * else weight = -1
8444 *
8445 *==========================================================================*/
8446void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8447 int32_t *region, int weight)
8448{
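    // Note: the FACE_* indices double as the generic (left, top, right, bottom, weight)
    // tuple offsets used for framework region arrays.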
Jason Lee8ce36fa2017-04-19 19:40:37 -07008449 region[FACE_LEFT] = rect.left;
8450 region[FACE_TOP] = rect.top;
8451 region[FACE_RIGHT] = rect.left + rect.width;
8452 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008453 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008454 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008455 }
8456}
8457
8458/*===========================================================================
8459 * FUNCTION : convertFromRegions
8460 *
8461 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8462 *
8463 * PARAMETERS :
8464 *   @roi            : cam_area_t destination struct
8465 *   @frame_settings : framework request settings containing the region tag
8466 *   @tag            : metadata tag whose data is laid out as
8467 *                     (x_min, y_min, x_max, y_max, weight)
8468 *
8469 *==========================================================================*/
8470void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008471 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008472{
Thierry Strudel3d639192016-09-09 11:52:26 -07008473 int32_t x_min = frame_settings.find(tag).data.i32[0];
8474 int32_t y_min = frame_settings.find(tag).data.i32[1];
8475 int32_t x_max = frame_settings.find(tag).data.i32[2];
8476 int32_t y_max = frame_settings.find(tag).data.i32[3];
8477 roi.weight = frame_settings.find(tag).data.i32[4];
8478 roi.rect.left = x_min;
8479 roi.rect.top = y_min;
8480 roi.rect.width = x_max - x_min;
8481 roi.rect.height = y_max - y_min;
8482}
8483
8484/*===========================================================================
8485 * FUNCTION : resetIfNeededROI
8486 *
8487 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8488 * crop region
8489 *
8490 * PARAMETERS :
8491 * @roi : cam_area_t struct to resize
8492 * @scalerCropRegion : cam_crop_region_t region to compare against
8493 *
8494 *
8495 *==========================================================================*/
8496bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8497 const cam_crop_region_t* scalerCropRegion)
8498{
8499 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8500 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8501 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8502 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8503
8504    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8505     * Without this check, the validation below (whether the roi lies inside the
8506     * scaler crop region) would fail, the roi would not be reset, and the
8507     * algorithm would continue to use a stale roi window.
8508 */
8509 if (roi->weight == 0) {
8510 return true;
8511 }
8512
8513 if ((roi_x_max < scalerCropRegion->left) ||
8514 // right edge of roi window is left of scalar crop's left edge
8515 (roi_y_max < scalerCropRegion->top) ||
8516 // bottom edge of roi window is above scalar crop's top edge
8517 (roi->rect.left > crop_x_max) ||
8518        // left edge of roi window is beyond (to the right of) scalar crop's right edge
8519        (roi->rect.top > crop_y_max)){
8520        // top edge of roi window is below scalar crop's bottom edge
8521 return false;
8522 }
8523 if (roi->rect.left < scalerCropRegion->left) {
8524 roi->rect.left = scalerCropRegion->left;
8525 }
8526 if (roi->rect.top < scalerCropRegion->top) {
8527 roi->rect.top = scalerCropRegion->top;
8528 }
8529 if (roi_x_max > crop_x_max) {
8530 roi_x_max = crop_x_max;
8531 }
8532 if (roi_y_max > crop_y_max) {
8533 roi_y_max = crop_y_max;
8534 }
8535 roi->rect.width = roi_x_max - roi->rect.left;
8536 roi->rect.height = roi_y_max - roi->rect.top;
8537 return true;
8538}
8539
8540/*===========================================================================
8541 * FUNCTION : convertLandmarks
8542 *
8543 * DESCRIPTION: helper method to extract the landmarks from face detection info
8544 *
8545 * PARAMETERS :
8546 * @landmark_data : input landmark data to be converted
8547 * @landmarks : int32_t destination array
8548 *
8549 *
8550 *==========================================================================*/
8551void QCamera3HardwareInterface::convertLandmarks(
8552 cam_face_landmarks_info_t landmark_data,
8553 int32_t *landmarks)
8554{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008555 if (landmark_data.is_left_eye_valid) {
8556 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8557 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8558 } else {
8559 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8560 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8561 }
8562
8563 if (landmark_data.is_right_eye_valid) {
8564 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8565 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8566 } else {
8567 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8568 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8569 }
8570
8571 if (landmark_data.is_mouth_valid) {
8572 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8573 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8574 } else {
8575 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8576 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8577 }
8578}
8579
8580/*===========================================================================
8581 * FUNCTION : setInvalidLandmarks
8582 *
8583 * DESCRIPTION: helper method to set invalid landmarks
8584 *
8585 * PARAMETERS :
8586 * @landmarks : int32_t destination array
8587 *
8588 *
8589 *==========================================================================*/
8590void QCamera3HardwareInterface::setInvalidLandmarks(
8591 int32_t *landmarks)
8592{
8593 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8594 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8595 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8596 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8597 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8598 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008599}
8600
8601#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008602
8603/*===========================================================================
8604 * FUNCTION : getCapabilities
8605 *
8606 * DESCRIPTION: query camera capability from back-end
8607 *
8608 * PARAMETERS :
8609 * @ops : mm-interface ops structure
8610 * @cam_handle : camera handle for which we need capability
8611 *
8612 * RETURN : ptr type of capability structure
8613 * capability for success
8614 * NULL for failure
8615 *==========================================================================*/
8616cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8617 uint32_t cam_handle)
8618{
8619 int rc = NO_ERROR;
8620 QCamera3HeapMemory *capabilityHeap = NULL;
8621 cam_capability_t *cap_ptr = NULL;
8622
8623 if (ops == NULL) {
8624 LOGE("Invalid arguments");
8625 return NULL;
8626 }
8627
8628 capabilityHeap = new QCamera3HeapMemory(1);
8629 if (capabilityHeap == NULL) {
8630 LOGE("creation of capabilityHeap failed");
8631 return NULL;
8632 }
8633
8634 /* Allocate memory for capability buffer */
8635 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8636 if(rc != OK) {
8637        LOGE("No memory for capability");
8638 goto allocate_failed;
8639 }
8640
8641 /* Map memory for capability buffer */
8642 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8643
8644 rc = ops->map_buf(cam_handle,
8645 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8646 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8647 if(rc < 0) {
8648 LOGE("failed to map capability buffer");
8649 rc = FAILED_TRANSACTION;
8650 goto map_failed;
8651 }
8652
8653 /* Query Capability */
8654 rc = ops->query_capability(cam_handle);
8655 if(rc < 0) {
8656 LOGE("failed to query capability");
8657 rc = FAILED_TRANSACTION;
8658 goto query_failed;
8659 }
8660
8661 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8662 if (cap_ptr == NULL) {
8663 LOGE("out of memory");
8664 rc = NO_MEMORY;
8665 goto query_failed;
8666 }
8667
8668 memset(cap_ptr, 0, sizeof(cam_capability_t));
8669 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8670
8671 int index;
8672 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8673 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8674 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8675 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8676 }
8677
8678query_failed:
8679 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8680map_failed:
8681 capabilityHeap->deallocate();
8682allocate_failed:
8683 delete capabilityHeap;
8684
8685 if (rc != NO_ERROR) {
8686 return NULL;
8687 } else {
8688 return cap_ptr;
8689 }
8690}
8691
Thierry Strudel3d639192016-09-09 11:52:26 -07008692/*===========================================================================
8693 * FUNCTION : initCapabilities
8694 *
8695 * DESCRIPTION: initialize camera capabilities in static data struct
8696 *
8697 * PARAMETERS :
8698 * @cameraId : camera Id
8699 *
8700 * RETURN : int32_t type of status
8701 * NO_ERROR -- success
8702 * none-zero failure code
8703 *==========================================================================*/
8704int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8705{
8706 int rc = 0;
8707 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008708 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008709
8710 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8711 if (rc) {
8712 LOGE("camera_open failed. rc = %d", rc);
8713 goto open_failed;
8714 }
8715 if (!cameraHandle) {
8716 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8717 goto open_failed;
8718 }
8719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008720 handle = get_main_camera_handle(cameraHandle->camera_handle);
8721 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8722 if (gCamCapability[cameraId] == NULL) {
8723 rc = FAILED_TRANSACTION;
8724 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008725 }
8726
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008727 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008728 if (is_dual_camera_by_idx(cameraId)) {
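        // For dual-camera sensors, also query the aux camera capability using the
        // aux camera handle.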
8729 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8730 gCamCapability[cameraId]->aux_cam_cap =
8731 getCapabilities(cameraHandle->ops, handle);
8732 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8733 rc = FAILED_TRANSACTION;
8734 free(gCamCapability[cameraId]);
8735 goto failed_op;
8736 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008737
8738 // Copy the main camera capability to main_cam_cap struct
8739 gCamCapability[cameraId]->main_cam_cap =
8740 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8741 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8742 LOGE("out of memory");
8743 rc = NO_MEMORY;
8744 goto failed_op;
8745 }
8746 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8747 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008748 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008749failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008750 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8751 cameraHandle = NULL;
8752open_failed:
8753 return rc;
8754}
8755
8756/*==========================================================================
8757 * FUNCTION   : get3AVersion
8758 *
8759 * DESCRIPTION: get the Q3A S/W version
8760 *
8761 * PARAMETERS :
8762 * @sw_version: Reference of Q3A structure which will hold version info upon
8763 * return
8764 *
8765 * RETURN : None
8766 *
8767 *==========================================================================*/
8768void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8769{
8770 if(gCamCapability[mCameraId])
8771 sw_version = gCamCapability[mCameraId]->q3a_version;
8772 else
8773 LOGE("Capability structure NULL!");
8774}
8775
8776
8777/*===========================================================================
8778 * FUNCTION : initParameters
8779 *
8780 * DESCRIPTION: initialize camera parameters
8781 *
8782 * PARAMETERS :
8783 *
8784 * RETURN : int32_t type of status
8785 * NO_ERROR -- success
8786 * none-zero failure code
8787 *==========================================================================*/
8788int QCamera3HardwareInterface::initParameters()
8789{
8790 int rc = 0;
8791
8792 //Allocate Set Param Buffer
8793 mParamHeap = new QCamera3HeapMemory(1);
8794 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8795 if(rc != OK) {
8796 rc = NO_MEMORY;
8797 LOGE("Failed to allocate SETPARM Heap memory");
8798 delete mParamHeap;
8799 mParamHeap = NULL;
8800 return rc;
8801 }
8802
8803 //Map memory for parameters buffer
8804 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8805 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8806 mParamHeap->getFd(0),
8807 sizeof(metadata_buffer_t),
8808 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8809 if(rc < 0) {
8810 LOGE("failed to map SETPARM buffer");
8811 rc = FAILED_TRANSACTION;
8812 mParamHeap->deallocate();
8813 delete mParamHeap;
8814 mParamHeap = NULL;
8815 return rc;
8816 }
8817
8818 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8819
8820 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8821 return rc;
8822}
8823
8824/*===========================================================================
8825 * FUNCTION : deinitParameters
8826 *
8827 * DESCRIPTION: de-initialize camera parameters
8828 *
8829 * PARAMETERS :
8830 *
8831 * RETURN : NONE
8832 *==========================================================================*/
8833void QCamera3HardwareInterface::deinitParameters()
8834{
8835 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8836 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8837
8838 mParamHeap->deallocate();
8839 delete mParamHeap;
8840 mParamHeap = NULL;
8841
8842 mParameters = NULL;
8843
8844 free(mPrevParameters);
8845 mPrevParameters = NULL;
8846}
8847
8848/*===========================================================================
8849 * FUNCTION : calcMaxJpegSize
8850 *
8851 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8852 *
8853 * PARAMETERS :
8854 *
8855 * RETURN : max_jpeg_size
8856 *==========================================================================*/
8857size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8858{
8859 size_t max_jpeg_size = 0;
8860 size_t temp_width, temp_height;
8861 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8862 MAX_SIZES_CNT);
8863 for (size_t i = 0; i < count; i++) {
8864 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8865 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8866 if (temp_width * temp_height > max_jpeg_size ) {
8867 max_jpeg_size = temp_width * temp_height;
8868 }
8869 }
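    // Worst-case JPEG buffer estimate: 1.5 bytes per pixel of the largest picture
    // size plus room for the camera3_jpeg_blob_t transport header.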
8870 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8871 return max_jpeg_size;
8872}
8873
8874/*===========================================================================
8875 * FUNCTION : getMaxRawSize
8876 *
8877 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8878 *
8879 * PARAMETERS :
8880 *
8881 * RETURN : Largest supported Raw Dimension
8882 *==========================================================================*/
8883cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8884{
8885 int max_width = 0;
8886 cam_dimension_t maxRawSize;
8887
8888 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8889 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8890 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8891 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8892 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8893 }
8894 }
8895 return maxRawSize;
8896}
8897
8898
8899/*===========================================================================
8900 * FUNCTION : calcMaxJpegDim
8901 *
8902 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8903 *
8904 * PARAMETERS :
8905 *
8906 * RETURN : max_jpeg_dim
8907 *==========================================================================*/
8908cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8909{
8910 cam_dimension_t max_jpeg_dim;
8911 cam_dimension_t curr_jpeg_dim;
8912 max_jpeg_dim.width = 0;
8913 max_jpeg_dim.height = 0;
8914 curr_jpeg_dim.width = 0;
8915 curr_jpeg_dim.height = 0;
8916 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8917 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8918 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8919 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8920 max_jpeg_dim.width * max_jpeg_dim.height ) {
8921 max_jpeg_dim.width = curr_jpeg_dim.width;
8922 max_jpeg_dim.height = curr_jpeg_dim.height;
8923 }
8924 }
8925 return max_jpeg_dim;
8926}
8927
8928/*===========================================================================
8929 * FUNCTION : addStreamConfig
8930 *
8931 * DESCRIPTION: adds the stream configuration to the array
8932 *
8933 * PARAMETERS :
8934 * @available_stream_configs : pointer to stream configuration array
8935 * @scalar_format : scalar format
8936 * @dim : configuration dimension
8937 * @config_type : input or output configuration type
8938 *
8939 * RETURN : NONE
8940 *==========================================================================*/
8941void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8942 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8943{
8944 available_stream_configs.add(scalar_format);
8945 available_stream_configs.add(dim.width);
8946 available_stream_configs.add(dim.height);
8947 available_stream_configs.add(config_type);
8948}
8949
8950/*===========================================================================
8951 * FUNCTION   : supportBurstCapture
8952 *
8953 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8954 *
8955 * PARAMETERS :
8956 * @cameraId : camera Id
8957 *
8958 * RETURN : true if camera supports BURST_CAPTURE
8959 * false otherwise
8960 *==========================================================================*/
8961bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8962{
8963 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8964 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8965 const int32_t highResWidth = 3264;
8966 const int32_t highResHeight = 2448;
8967
8968 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8969 // Maximum resolution images cannot be captured at >= 10fps
8970 // -> not supporting BURST_CAPTURE
8971 return false;
8972 }
8973
8974 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8975 // Maximum resolution images can be captured at >= 20fps
8976 // --> supporting BURST_CAPTURE
8977 return true;
8978 }
8979
8980 // Find the smallest highRes resolution, or largest resolution if there is none
8981 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8982 MAX_SIZES_CNT);
8983 size_t highRes = 0;
8984 while ((highRes + 1 < totalCnt) &&
8985 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8986 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8987 highResWidth * highResHeight)) {
8988 highRes++;
8989 }
8990 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8991 return true;
8992 } else {
8993 return false;
8994 }
8995}
8996
8997/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008998 * FUNCTION : getPDStatIndex
8999 *
9000 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9001 *
9002 * PARAMETERS :
9003 * @caps : camera capabilities
9004 *
9005 * RETURN : int32_t type
9006 * non-negative - on success
9007 * -1 - on failure
9008 *==========================================================================*/
9009int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9010 if (nullptr == caps) {
9011 return -1;
9012 }
9013
9014 uint32_t metaRawCount = caps->meta_raw_channel_count;
9015 int32_t ret = -1;
9016 for (size_t i = 0; i < metaRawCount; i++) {
9017 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9018 ret = i;
9019 break;
9020 }
9021 }
9022
9023 return ret;
9024}
9025
9026/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009027 * FUNCTION : initStaticMetadata
9028 *
9029 * DESCRIPTION: initialize the static metadata
9030 *
9031 * PARAMETERS :
9032 * @cameraId : camera Id
9033 *
9034 * RETURN : int32_t type of status
9035 * 0 -- success
9036 * non-zero failure code
9037 *==========================================================================*/
9038int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9039{
9040 int rc = 0;
9041 CameraMetadata staticInfo;
9042 size_t count = 0;
9043 bool limitedDevice = false;
9044 char prop[PROPERTY_VALUE_MAX];
9045 bool supportBurst = false;
9046
9047 supportBurst = supportBurstCapture(cameraId);
9048
9049 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9050     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9051     * advertised as a limited device */
9052 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9053 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9054 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9055 !supportBurst;
9056
9057 uint8_t supportedHwLvl = limitedDevice ?
9058 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009059#ifndef USE_HAL_3_3
9060 // LEVEL_3 - This device will support level 3.
9061 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9062#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009063 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009064#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009065
9066 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9067 &supportedHwLvl, 1);
9068
9069 bool facingBack = false;
9070 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9071 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9072 facingBack = true;
9073 }
9074 /*HAL 3 only*/
9075 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9076 &gCamCapability[cameraId]->min_focus_distance, 1);
9077
9078 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9079 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9080
9081 /*should be using focal lengths but sensor doesn't provide that info now*/
9082 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9083 &gCamCapability[cameraId]->focal_length,
9084 1);
9085
9086 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9087 gCamCapability[cameraId]->apertures,
9088 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9089
9090 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9091 gCamCapability[cameraId]->filter_densities,
9092 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9093
9094
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009095 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9096 size_t mode_count =
9097 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9098 for (size_t i = 0; i < mode_count; i++) {
9099 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9100 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009101 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009102 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009103
9104 int32_t lens_shading_map_size[] = {
9105 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9106 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9107 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9108 lens_shading_map_size,
9109 sizeof(lens_shading_map_size)/sizeof(int32_t));
9110
9111 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9112 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9113
9114 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9115 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9116
9117 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9118 &gCamCapability[cameraId]->max_frame_duration, 1);
9119
9120 camera_metadata_rational baseGainFactor = {
9121 gCamCapability[cameraId]->base_gain_factor.numerator,
9122 gCamCapability[cameraId]->base_gain_factor.denominator};
9123 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9124 &baseGainFactor, 1);
9125
9126 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9127 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9128
9129 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9130 gCamCapability[cameraId]->pixel_array_size.height};
9131 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9132 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9133
9134 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9135 gCamCapability[cameraId]->active_array_size.top,
9136 gCamCapability[cameraId]->active_array_size.width,
9137 gCamCapability[cameraId]->active_array_size.height};
9138 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9139 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9140
9141 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9142 &gCamCapability[cameraId]->white_level, 1);
9143
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009144 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9145 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9146 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009148 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009149
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009150#ifndef USE_HAL_3_3
9151 bool hasBlackRegions = false;
9152 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9153 LOGW("black_region_count: %d is bounded to %d",
9154 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9155 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9156 }
9157 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9158 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9159 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9160 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9161 }
9162 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9163 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9164 hasBlackRegions = true;
9165 }
9166#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009167 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9168 &gCamCapability[cameraId]->flash_charge_duration, 1);
9169
9170 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9171 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9172
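    // A calibrated timestamp means sensor timestamps share the same clock as CLOCK_BOOTTIME
    // (SystemClock.elapsedRealtimeNanos()); otherwise the source is reported as UNKNOWN.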
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009173 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9174 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9175 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009176 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9177 &timestampSource, 1);
9178
Thierry Strudel54dc9782017-02-15 12:12:10 -08009179 //update histogram vendor data
9180 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009181 &gCamCapability[cameraId]->histogram_size, 1);
9182
Thierry Strudel54dc9782017-02-15 12:12:10 -08009183 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009184 &gCamCapability[cameraId]->max_histogram_count, 1);
9185
Shuzhen Wang14415f52016-11-16 18:26:18 -08009186 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9187 //so that the app can request fewer bins than the maximum supported.
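    //E.g., with illustrative values max_histogram_count = 256 and
    //MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the advertised bins are {256, 128, 64}.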
9188 std::vector<int32_t> histBins;
9189 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9190 histBins.push_back(maxHistBins);
9191 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9192 (maxHistBins & 0x1) == 0) {
9193 histBins.push_back(maxHistBins >> 1);
9194 maxHistBins >>= 1;
9195 }
9196 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9197 histBins.data(), histBins.size());
9198
Thierry Strudel3d639192016-09-09 11:52:26 -07009199 int32_t sharpness_map_size[] = {
9200 gCamCapability[cameraId]->sharpness_map_size.width,
9201 gCamCapability[cameraId]->sharpness_map_size.height};
9202
9203 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9204 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9205
9206 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9207 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9208
Emilian Peev0f3c3162017-03-15 12:57:46 +00009209 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9210 if (0 <= indexPD) {
9211 // Advertise PD stats data as part of the Depth capabilities
9212 int32_t depthWidth =
9213 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9214 int32_t depthHeight =
9215 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9216 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9217 assert(0 < depthSamplesCount);
9218 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9219 &depthSamplesCount, 1);
9220
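        // Depth stream configurations are (format, width, height, direction) tuples;
        // the BLOB entry uses (depthSamplesCount x 1) as its dimensions.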
9221 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9222 depthHeight,
9223 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9224 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9225 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9226 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9227 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9228
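        // Minimum frame durations are (format, width, height, duration in ns) tuples;
        // 33333333 ns corresponds to roughly 30 fps.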
9229 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9230 depthHeight, 33333333,
9231 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9232 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9233 depthMinDuration,
9234 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9235
9236 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9237 depthHeight, 0,
9238 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9239 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9240 depthStallDuration,
9241 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9242
9243 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9244 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9245 }
9246
Thierry Strudel3d639192016-09-09 11:52:26 -07009247 int32_t scalar_formats[] = {
9248 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9249 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9250 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9251 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9252 HAL_PIXEL_FORMAT_RAW10,
9253 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009254 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9255 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9256 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009257
9258 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9259 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9260 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9261 count, MAX_SIZES_CNT, available_processed_sizes);
9262 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9263 available_processed_sizes, count * 2);
9264
9265 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9266 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9267 makeTable(gCamCapability[cameraId]->raw_dim,
9268 count, MAX_SIZES_CNT, available_raw_sizes);
9269 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9270 available_raw_sizes, count * 2);
9271
9272 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9273 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9274 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9275 count, MAX_SIZES_CNT, available_fps_ranges);
9276 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9277 available_fps_ranges, count * 2);
9278
9279 camera_metadata_rational exposureCompensationStep = {
9280 gCamCapability[cameraId]->exp_compensation_step.numerator,
9281 gCamCapability[cameraId]->exp_compensation_step.denominator};
9282 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9283 &exposureCompensationStep, 1);
9284
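    // STABILIZATION_MODE_OFF is always advertised; STABILIZATION_MODE_ON is added only when
    // EIS is enabled via persist.camera.eis.enable, the sensor reports EIS 2.0/3.0 support,
    // and this is the back camera.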
9285 Vector<uint8_t> availableVstabModes;
9286 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9287 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009288 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009289 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009290 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009291 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009292 count = IS_TYPE_MAX;
9293 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9294 for (size_t i = 0; i < count; i++) {
9295 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9296 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9297 eisSupported = true;
9298 break;
9299 }
9300 }
9301 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009302 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9303 }
9304 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9305 availableVstabModes.array(), availableVstabModes.size());
9306
9307 /*HAL 1 and HAL 3 common*/
9308 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9309 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9310 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
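    // zoom_ratio_tbl entries appear to be scaled by 100 (100 == 1x), matching minZoomStep above,
    // so dividing the last entry by minZoomStep yields the maximum digital zoom ratio.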
Zhijun He2a5df222017-04-04 18:20:38 -07009311 // Cap the max zoom to the max preferred value
9312 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009313 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9314 &maxZoom, 1);
9315
9316 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9317 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9318
9319 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9320 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9321 max3aRegions[2] = 0; /* AF not supported */
9322 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9323 max3aRegions, 3);
9324
9325 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9326 memset(prop, 0, sizeof(prop));
9327 property_get("persist.camera.facedetect", prop, "1");
9328 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9329 LOGD("Support face detection mode: %d",
9330 supportedFaceDetectMode);
9331
9332 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009333 /* supported mode should be OFF if the max number of faces is 0 */
9334 if (maxFaces <= 0) {
9335 supportedFaceDetectMode = 0;
9336 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 Vector<uint8_t> availableFaceDetectModes;
9338 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9339 if (supportedFaceDetectMode == 1) {
9340 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9341 } else if (supportedFaceDetectMode == 2) {
9342 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9343 } else if (supportedFaceDetectMode == 3) {
9344 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9345 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9346 } else {
9347 maxFaces = 0;
9348 }
9349 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9350 availableFaceDetectModes.array(),
9351 availableFaceDetectModes.size());
9352 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9353 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009354 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9355 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9356 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009357
9358 int32_t exposureCompensationRange[] = {
9359 gCamCapability[cameraId]->exposure_compensation_min,
9360 gCamCapability[cameraId]->exposure_compensation_max};
9361 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9362 exposureCompensationRange,
9363 sizeof(exposureCompensationRange)/sizeof(int32_t));
9364
9365 uint8_t lensFacing = (facingBack) ?
9366 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9367 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9368
9369 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9370 available_thumbnail_sizes,
9371 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9372
9373 /*all sizes will be combined into this tag*/
9374 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9375 /*android.scaler.availableStreamConfigurations*/
9376 Vector<int32_t> available_stream_configs;
9377 cam_dimension_t active_array_dim;
9378 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9379 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009380
9381 /*Advertise the list of supported input dimensions based on the property below.
9382 By default all sizes up to 5MP will be advertised.
9383 Note that the setprop resolution format should be WxH,
9384 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9385 To list all supported sizes, the setprop needs to be set to "0x0" */
9386 cam_dimension_t minInputSize = {2592,1944}; //5MP
9387 memset(prop, 0, sizeof(prop));
9388 property_get("persist.camera.input.minsize", prop, "2592x1944");
9389 if (strlen(prop) > 0) {
9390 char *saveptr = NULL;
9391 char *token = strtok_r(prop, "x", &saveptr);
9392 if (token != NULL) {
9393 minInputSize.width = atoi(token);
9394 }
9395 token = strtok_r(NULL, "x", &saveptr);
9396 if (token != NULL) {
9397 minInputSize.height = atoi(token);
9398 }
9399 }
9400
Thierry Strudel3d639192016-09-09 11:52:26 -07009401 /* Add input/output stream configurations for each scalar formats*/
9402 for (size_t j = 0; j < scalar_formats_count; j++) {
9403 switch (scalar_formats[j]) {
9404 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9405 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9406 case HAL_PIXEL_FORMAT_RAW10:
9407 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9408 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9409 addStreamConfig(available_stream_configs, scalar_formats[j],
9410 gCamCapability[cameraId]->raw_dim[i],
9411 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9412 }
9413 break;
9414 case HAL_PIXEL_FORMAT_BLOB:
9415 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9416 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9417 addStreamConfig(available_stream_configs, scalar_formats[j],
9418 gCamCapability[cameraId]->picture_sizes_tbl[i],
9419 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9420 }
9421 break;
9422 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9423 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9424 default:
9425 cam_dimension_t largest_picture_size;
9426 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9427 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9428 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9429 addStreamConfig(available_stream_configs, scalar_formats[j],
9430 gCamCapability[cameraId]->picture_sizes_tbl[i],
9431 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009432 /*For the below 2 formats we also support input streams for reprocessing; advertise those*/
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009433 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9434 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009435 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9436 >= minInputSize.width) || (gCamCapability[cameraId]->
9437 picture_sizes_tbl[i].height >= minInputSize.height)) {
9438 addStreamConfig(available_stream_configs, scalar_formats[j],
9439 gCamCapability[cameraId]->picture_sizes_tbl[i],
9440 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9441 }
9442 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009443 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009444
Thierry Strudel3d639192016-09-09 11:52:26 -07009445 break;
9446 }
9447 }
9448
9449 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9450 available_stream_configs.array(), available_stream_configs.size());
9451 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9452 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9453
9454 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9455 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9456
9457 /* android.scaler.availableMinFrameDurations */
9458 Vector<int64_t> available_min_durations;
9459 for (size_t j = 0; j < scalar_formats_count; j++) {
9460 switch (scalar_formats[j]) {
9461 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9462 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9463 case HAL_PIXEL_FORMAT_RAW10:
9464 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9465 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9466 available_min_durations.add(scalar_formats[j]);
9467 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9468 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9469 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9470 }
9471 break;
9472 default:
9473 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9474 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9475 available_min_durations.add(scalar_formats[j]);
9476 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9477 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9478 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9479 }
9480 break;
9481 }
9482 }
9483 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9484 available_min_durations.array(), available_min_durations.size());
9485
9486 Vector<int32_t> available_hfr_configs;
9487 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9488 int32_t fps = 0;
9489 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9490 case CAM_HFR_MODE_60FPS:
9491 fps = 60;
9492 break;
9493 case CAM_HFR_MODE_90FPS:
9494 fps = 90;
9495 break;
9496 case CAM_HFR_MODE_120FPS:
9497 fps = 120;
9498 break;
9499 case CAM_HFR_MODE_150FPS:
9500 fps = 150;
9501 break;
9502 case CAM_HFR_MODE_180FPS:
9503 fps = 180;
9504 break;
9505 case CAM_HFR_MODE_210FPS:
9506 fps = 210;
9507 break;
9508 case CAM_HFR_MODE_240FPS:
9509 fps = 240;
9510 break;
9511 case CAM_HFR_MODE_480FPS:
9512 fps = 480;
9513 break;
9514 case CAM_HFR_MODE_OFF:
9515 case CAM_HFR_MODE_MAX:
9516 default:
9517 break;
9518 }
9519
9520 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9521 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9522 /* For each HFR frame rate, need to advertise one variable fps range
9523 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9524 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9525 * set by the app. When video recording is started, [120, 120] is
9526 * set. This way sensor configuration does not change when recording
9527 * is started */
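             /* E.g., for 120 fps and assuming PREVIEW_FPS_FOR_HFR == 30 (illustrative), each
              * dimension gets the two entries (w, h, 30, 120, 4) and (w, h, 120, 120, 4). */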
9528
9529 /* (width, height, fps_min, fps_max, batch_size_max) */
9530 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9531 j < MAX_SIZES_CNT; j++) {
9532 available_hfr_configs.add(
9533 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9534 available_hfr_configs.add(
9535 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9536 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9537 available_hfr_configs.add(fps);
9538 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9539
9540 /* (width, height, fps_min, fps_max, batch_size_max) */
9541 available_hfr_configs.add(
9542 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9543 available_hfr_configs.add(
9544 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9545 available_hfr_configs.add(fps);
9546 available_hfr_configs.add(fps);
9547 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9548 }
9549 }
9550 }
9551 //Advertise HFR capability only if the property is set
9552 memset(prop, 0, sizeof(prop));
9553 property_get("persist.camera.hal3hfr.enable", prop, "1");
9554 uint8_t hfrEnable = (uint8_t)atoi(prop);
9555
9556 if(hfrEnable && available_hfr_configs.array()) {
9557 staticInfo.update(
9558 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9559 available_hfr_configs.array(), available_hfr_configs.size());
9560 }
9561
9562 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9563 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9564 &max_jpeg_size, 1);
9565
9566 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9567 size_t size = 0;
9568 count = CAM_EFFECT_MODE_MAX;
9569 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9570 for (size_t i = 0; i < count; i++) {
9571 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9572 gCamCapability[cameraId]->supported_effects[i]);
9573 if (NAME_NOT_FOUND != val) {
9574 avail_effects[size] = (uint8_t)val;
9575 size++;
9576 }
9577 }
9578 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9579 avail_effects,
9580 size);
9581
9582 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9583 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9584 size_t supported_scene_modes_cnt = 0;
9585 count = CAM_SCENE_MODE_MAX;
9586 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9587 for (size_t i = 0; i < count; i++) {
9588 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9589 CAM_SCENE_MODE_OFF) {
9590 int val = lookupFwkName(SCENE_MODES_MAP,
9591 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9592 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009593
Thierry Strudel3d639192016-09-09 11:52:26 -07009594 if (NAME_NOT_FOUND != val) {
9595 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9596 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9597 supported_scene_modes_cnt++;
9598 }
9599 }
9600 }
9601 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9602 avail_scene_modes,
9603 supported_scene_modes_cnt);
9604
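    // Each scene mode contributes a 3-tuple override: (AE mode, AWB mode, AF mode).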
9605 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9606 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9607 supported_scene_modes_cnt,
9608 CAM_SCENE_MODE_MAX,
9609 scene_mode_overrides,
9610 supported_indexes,
9611 cameraId);
9612
9613 if (supported_scene_modes_cnt == 0) {
9614 supported_scene_modes_cnt = 1;
9615 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9616 }
9617
9618 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9619 scene_mode_overrides, supported_scene_modes_cnt * 3);
9620
9621 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9622 ANDROID_CONTROL_MODE_AUTO,
9623 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9624 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9625 available_control_modes,
9626 3);
9627
9628 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9629 size = 0;
9630 count = CAM_ANTIBANDING_MODE_MAX;
9631 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9632 for (size_t i = 0; i < count; i++) {
9633 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9634 gCamCapability[cameraId]->supported_antibandings[i]);
9635 if (NAME_NOT_FOUND != val) {
9636 avail_antibanding_modes[size] = (uint8_t)val;
9637 size++;
9638 }
9639
9640 }
9641 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9642 avail_antibanding_modes,
9643 size);
9644
9645 uint8_t avail_abberation_modes[] = {
9646 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9647 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9648 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9649 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9650 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9651 if (0 == count) {
9652 // If no aberration correction modes are available for a device, advertise only the OFF mode
9653 size = 1;
9654 } else {
9655 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9656 // so advertise all 3 modes if at least one mode is supported, as per the
9657 // new M requirement
9658 size = 3;
9659 }
9660 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9661 avail_abberation_modes,
9662 size);
9663
9664 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9665 size = 0;
9666 count = CAM_FOCUS_MODE_MAX;
9667 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9668 for (size_t i = 0; i < count; i++) {
9669 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9670 gCamCapability[cameraId]->supported_focus_modes[i]);
9671 if (NAME_NOT_FOUND != val) {
9672 avail_af_modes[size] = (uint8_t)val;
9673 size++;
9674 }
9675 }
9676 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9677 avail_af_modes,
9678 size);
9679
9680 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9681 size = 0;
9682 count = CAM_WB_MODE_MAX;
9683 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9684 for (size_t i = 0; i < count; i++) {
9685 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9686 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9687 gCamCapability[cameraId]->supported_white_balances[i]);
9688 if (NAME_NOT_FOUND != val) {
9689 avail_awb_modes[size] = (uint8_t)val;
9690 size++;
9691 }
9692 }
9693 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9694 avail_awb_modes,
9695 size);
9696
9697 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9698 count = CAM_FLASH_FIRING_LEVEL_MAX;
9699 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9700 count);
9701 for (size_t i = 0; i < count; i++) {
9702 available_flash_levels[i] =
9703 gCamCapability[cameraId]->supported_firing_levels[i];
9704 }
9705 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9706 available_flash_levels, count);
9707
9708 uint8_t flashAvailable;
9709 if (gCamCapability[cameraId]->flash_available)
9710 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9711 else
9712 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9713 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9714 &flashAvailable, 1);
9715
9716 Vector<uint8_t> avail_ae_modes;
9717 count = CAM_AE_MODE_MAX;
9718 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9719 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009720 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9721 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9722 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9723 }
9724 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009725 }
9726 if (flashAvailable) {
9727 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9728 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9729 }
9730 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9731 avail_ae_modes.array(),
9732 avail_ae_modes.size());
9733
9734 int32_t sensitivity_range[2];
9735 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9736 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9737 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9738 sensitivity_range,
9739 sizeof(sensitivity_range) / sizeof(int32_t));
9740
9741 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9742 &gCamCapability[cameraId]->max_analog_sensitivity,
9743 1);
9744
9745 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9746 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9747 &sensor_orientation,
9748 1);
9749
9750 int32_t max_output_streams[] = {
9751 MAX_STALLING_STREAMS,
9752 MAX_PROCESSED_STREAMS,
9753 MAX_RAW_STREAMS};
9754 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9755 max_output_streams,
9756 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9757
9758 uint8_t avail_leds = 0;
9759 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9760 &avail_leds, 0);
9761
9762 uint8_t focus_dist_calibrated;
9763 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9764 gCamCapability[cameraId]->focus_dist_calibrated);
9765 if (NAME_NOT_FOUND != val) {
9766 focus_dist_calibrated = (uint8_t)val;
9767 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9768 &focus_dist_calibrated, 1);
9769 }
9770
9771 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9772 size = 0;
9773 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9774 MAX_TEST_PATTERN_CNT);
9775 for (size_t i = 0; i < count; i++) {
9776 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9777 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9778 if (NAME_NOT_FOUND != testpatternMode) {
9779 avail_testpattern_modes[size] = testpatternMode;
9780 size++;
9781 }
9782 }
9783 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9784 avail_testpattern_modes,
9785 size);
9786
9787 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9788 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9789 &max_pipeline_depth,
9790 1);
9791
9792 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9793 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9794 &partial_result_count,
9795 1);
9796
9797 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9798 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9799
9800 Vector<uint8_t> available_capabilities;
9801 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9802 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9803 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9804 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9805 if (supportBurst) {
9806 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9807 }
9808 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9809 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9810 if (hfrEnable && available_hfr_configs.array()) {
9811 available_capabilities.add(
9812 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9813 }
9814
9815 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9816 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9817 }
9818 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9819 available_capabilities.array(),
9820 available_capabilities.size());
9821
9822 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9823 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9824 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9825 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9826
9827 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9828 &aeLockAvailable, 1);
9829
9830 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9831 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9832 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9833 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9834
9835 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9836 &awbLockAvailable, 1);
9837
9838 int32_t max_input_streams = 1;
9839 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9840 &max_input_streams,
9841 1);
9842
9843 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9844 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9845 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9846 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9847 HAL_PIXEL_FORMAT_YCbCr_420_888};
9848 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9849 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9850
9851 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9852 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9853 &max_latency,
9854 1);
9855
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009856#ifndef USE_HAL_3_3
9857 int32_t isp_sensitivity_range[2];
9858 isp_sensitivity_range[0] =
9859 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9860 isp_sensitivity_range[1] =
9861 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9862 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9863 isp_sensitivity_range,
9864 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9865#endif
9866
Thierry Strudel3d639192016-09-09 11:52:26 -07009867 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9868 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9869 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9870 available_hot_pixel_modes,
9871 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9872
9873 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9874 ANDROID_SHADING_MODE_FAST,
9875 ANDROID_SHADING_MODE_HIGH_QUALITY};
9876 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9877 available_shading_modes,
9878 3);
9879
9880 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9881 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9882 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9883 available_lens_shading_map_modes,
9884 2);
9885
9886 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9887 ANDROID_EDGE_MODE_FAST,
9888 ANDROID_EDGE_MODE_HIGH_QUALITY,
9889 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9890 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9891 available_edge_modes,
9892 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9893
9894 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9895 ANDROID_NOISE_REDUCTION_MODE_FAST,
9896 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9897 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9898 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9899 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9900 available_noise_red_modes,
9901 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9902
9903 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9904 ANDROID_TONEMAP_MODE_FAST,
9905 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9906 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9907 available_tonemap_modes,
9908 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9909
9910 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9911 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9912 available_hot_pixel_map_modes,
9913 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9914
9915 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9916 gCamCapability[cameraId]->reference_illuminant1);
9917 if (NAME_NOT_FOUND != val) {
9918 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9919 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9920 }
9921
9922 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9923 gCamCapability[cameraId]->reference_illuminant2);
9924 if (NAME_NOT_FOUND != val) {
9925 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9926 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9927 }
9928
9929 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9930 (void *)gCamCapability[cameraId]->forward_matrix1,
9931 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9932
9933 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9934 (void *)gCamCapability[cameraId]->forward_matrix2,
9935 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9936
9937 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9938 (void *)gCamCapability[cameraId]->color_transform1,
9939 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9940
9941 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9942 (void *)gCamCapability[cameraId]->color_transform2,
9943 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9944
9945 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9946 (void *)gCamCapability[cameraId]->calibration_transform1,
9947 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9948
9949 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9950 (void *)gCamCapability[cameraId]->calibration_transform2,
9951 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9952
9953 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9954 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9955 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9956 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9957 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9958 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9959 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9960 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9961 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9962 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9963 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9964 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9965 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9966 ANDROID_JPEG_GPS_COORDINATES,
9967 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9968 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9969 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9970 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9971 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9972 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9973 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9974 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9975 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9976 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009977#ifndef USE_HAL_3_3
9978 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9979#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009980 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009981 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009982 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9983 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009984 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009985 /* DevCamDebug metadata request_keys_basic */
9986 DEVCAMDEBUG_META_ENABLE,
9987 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009988 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009989 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07009990 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -07009991 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -08009992 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009993
9994 size_t request_keys_cnt =
9995 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9996 Vector<int32_t> available_request_keys;
9997 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9998 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9999 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10000 }
10001
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010002 if (gExposeEnableZslKey) {
Chien-Yu Chen3b630e52017-06-02 15:39:47 -070010003 if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
10004 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10005 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010006 }
10007
Thierry Strudel3d639192016-09-09 11:52:26 -070010008 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10009 available_request_keys.array(), available_request_keys.size());
10010
10011 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10012 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10013 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10014 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10015 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10016 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10017 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10018 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10019 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10020 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10021 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10022 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10023 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10024 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10025 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10026 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10027 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010028 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010029 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10030 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10031 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010032 ANDROID_STATISTICS_FACE_SCORES,
10033#ifndef USE_HAL_3_3
10034 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10035#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010036 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010037 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010038 // DevCamDebug metadata result_keys_basic
10039 DEVCAMDEBUG_META_ENABLE,
10040 // DevCamDebug metadata result_keys AF
10041 DEVCAMDEBUG_AF_LENS_POSITION,
10042 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10043 DEVCAMDEBUG_AF_TOF_DISTANCE,
10044 DEVCAMDEBUG_AF_LUMA,
10045 DEVCAMDEBUG_AF_HAF_STATE,
10046 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10047 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10048 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10049 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10050 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10051 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10052 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10053 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10054 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10055 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10056 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10057 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10058 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10059 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10060 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10061 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10062 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10063 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10064 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10065 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10066 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10067 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10068 // DevCamDebug metadata result_keys AEC
10069 DEVCAMDEBUG_AEC_TARGET_LUMA,
10070 DEVCAMDEBUG_AEC_COMP_LUMA,
10071 DEVCAMDEBUG_AEC_AVG_LUMA,
10072 DEVCAMDEBUG_AEC_CUR_LUMA,
10073 DEVCAMDEBUG_AEC_LINECOUNT,
10074 DEVCAMDEBUG_AEC_REAL_GAIN,
10075 DEVCAMDEBUG_AEC_EXP_INDEX,
10076 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010077 // DevCamDebug metadata result_keys zzHDR
10078 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10079 DEVCAMDEBUG_AEC_L_LINECOUNT,
10080 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10081 DEVCAMDEBUG_AEC_S_LINECOUNT,
10082 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10083 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10084 // DevCamDebug metadata result_keys ADRC
10085 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10086 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10087 DEVCAMDEBUG_AEC_GTM_RATIO,
10088 DEVCAMDEBUG_AEC_LTM_RATIO,
10089 DEVCAMDEBUG_AEC_LA_RATIO,
10090 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010091 // DevCamDebug metadata result_keys AWB
10092 DEVCAMDEBUG_AWB_R_GAIN,
10093 DEVCAMDEBUG_AWB_G_GAIN,
10094 DEVCAMDEBUG_AWB_B_GAIN,
10095 DEVCAMDEBUG_AWB_CCT,
10096 DEVCAMDEBUG_AWB_DECISION,
10097 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010098 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10099 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10100 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010101 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010102 };
10103
Thierry Strudel3d639192016-09-09 11:52:26 -070010104 size_t result_keys_cnt =
10105 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10106
10107 Vector<int32_t> available_result_keys;
10108 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10109 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10110 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10111 }
10112 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10113 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10114 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10115 }
10116 if (supportedFaceDetectMode == 1) {
10117 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10118 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10119 } else if ((supportedFaceDetectMode == 2) ||
10120 (supportedFaceDetectMode == 3)) {
10121 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10122 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10123 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010124#ifndef USE_HAL_3_3
10125 if (hasBlackRegions) {
10126 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10127 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10128 }
10129#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010130
10131 if (gExposeEnableZslKey) {
10132 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10133 }
10134
Thierry Strudel3d639192016-09-09 11:52:26 -070010135 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10136 available_result_keys.array(), available_result_keys.size());
10137
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010138 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010139 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10140 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10141 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10142 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10143 ANDROID_SCALER_CROPPING_TYPE,
10144 ANDROID_SYNC_MAX_LATENCY,
10145 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10146 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10147 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10148 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10149 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10150 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10151 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10152 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10153 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10154 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10155 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10156 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10157 ANDROID_LENS_FACING,
10158 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10159 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10160 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10161 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10162 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10163 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10164 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10165 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10166 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10167 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10168 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10169 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10170 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10171 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10172 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10173 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10174 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10175 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10176 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10177 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010178 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010179 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10180 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10181 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10182 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10183 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10184 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10185 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10186 ANDROID_CONTROL_AVAILABLE_MODES,
10187 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10188 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10189 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10190 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010191 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10192#ifndef USE_HAL_3_3
10193 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10194 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10195#endif
10196 };
10197
10198 Vector<int32_t> available_characteristics_keys;
10199 available_characteristics_keys.appendArray(characteristics_keys_basic,
10200 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10201#ifndef USE_HAL_3_3
10202 if (hasBlackRegions) {
10203 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10204 }
10205#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010206
10207 if (0 <= indexPD) {
10208 int32_t depthKeys[] = {
10209 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10210 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10211 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10212 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10213 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10214 };
10215 available_characteristics_keys.appendArray(depthKeys,
10216 sizeof(depthKeys) / sizeof(depthKeys[0]));
10217 }
10218
Thierry Strudel3d639192016-09-09 11:52:26 -070010219 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010220 available_characteristics_keys.array(),
10221 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010222
10223 /*available stall durations depend on the hw + sw and will be different for different devices */
10224 /*have to add for raw after implementation*/
10225 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10226 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10227
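    // Stall duration entries are (format, width, height, stall duration in ns) tuples;
    // only BLOB and RAW16 incur capture stalls here.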
10228 Vector<int64_t> available_stall_durations;
10229 for (uint32_t j = 0; j < stall_formats_count; j++) {
10230 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10231 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10232 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10233 available_stall_durations.add(stall_formats[j]);
10234 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10235 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10236 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10237 }
10238 } else {
10239 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10240 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10241 available_stall_durations.add(stall_formats[j]);
10242 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10243 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10244 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10245 }
10246 }
10247 }
10248 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10249 available_stall_durations.array(),
10250 available_stall_durations.size());
10251
10252 //QCAMERA3_OPAQUE_RAW
10253 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10254 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10255 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10256 case LEGACY_RAW:
10257 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10258 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10259 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10260 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10261 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10262 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10263 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10264 break;
10265 case MIPI_RAW:
10266 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10267 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10268 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10269 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10270 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10271 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10272 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10273 break;
10274 default:
10275 LOGE("unknown opaque_raw_format %d",
10276 gCamCapability[cameraId]->opaque_raw_fmt);
10277 break;
10278 }
10279 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10280
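    // QCAMERA3_OPAQUE_RAW_STRIDES holds (width, height, stride) triplets, one per supported
    // opaque RAW dimension.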
10281 Vector<int32_t> strides;
10282 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10283 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10284 cam_stream_buf_plane_info_t buf_planes;
10285 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10286 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10287 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10288 &gCamCapability[cameraId]->padding_info, &buf_planes);
10289 strides.add(buf_planes.plane_info.mp[0].stride);
10290 }
10291 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10292 strides.size());
10293
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010294 //TBD: remove the following line once backend advertises zzHDR in feature mask
10295 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010296 //Video HDR default
10297 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10298 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010299 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010300 int32_t vhdr_mode[] = {
10301 QCAMERA3_VIDEO_HDR_MODE_OFF,
10302 QCAMERA3_VIDEO_HDR_MODE_ON};
10303
10304 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10305 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10306 vhdr_mode, vhdr_mode_count);
10307 }
10308
Thierry Strudel3d639192016-09-09 11:52:26 -070010309 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10310 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10311 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10312
10313 uint8_t isMonoOnly =
10314 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10315 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10316 &isMonoOnly, 1);
10317
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010318#ifndef USE_HAL_3_3
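    // ANDROID_SENSOR_OPAQUE_RAW_SIZE is a list of (width, height, frame length in bytes)
    // triplets, one per opaque RAW dimension for which the plane layout can be computed.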
10319 Vector<int32_t> opaque_size;
10320 for (size_t j = 0; j < scalar_formats_count; j++) {
10321 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10322 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10323 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10324 cam_stream_buf_plane_info_t buf_planes;
10325
10326 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10327 &gCamCapability[cameraId]->padding_info, &buf_planes);
10328
10329 if (rc == 0) {
10330 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10331 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10332 opaque_size.add(buf_planes.plane_info.frame_len);
10333 } else {
10334 LOGE("raw frame calculation failed!");
10335 }
10336 }
10337 }
10338 }
10339
10340 if ((opaque_size.size() > 0) &&
10341 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10342 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10343 else
10344 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10345#endif
10346
Thierry Strudel04e026f2016-10-10 11:27:36 -070010347 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10348 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10349 size = 0;
10350 count = CAM_IR_MODE_MAX;
10351 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10352 for (size_t i = 0; i < count; i++) {
10353 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10354 gCamCapability[cameraId]->supported_ir_modes[i]);
10355 if (NAME_NOT_FOUND != val) {
10356 avail_ir_modes[size] = (int32_t)val;
10357 size++;
10358 }
10359 }
10360 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10361 avail_ir_modes, size);
10362 }
10363
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010364 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10365 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10366 size = 0;
10367 count = CAM_AEC_CONVERGENCE_MAX;
10368 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10369 for (size_t i = 0; i < count; i++) {
10370 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10371 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10372 if (NAME_NOT_FOUND != val) {
10373 available_instant_aec_modes[size] = (int32_t)val;
10374 size++;
10375 }
10376 }
10377 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10378 available_instant_aec_modes, size);
10379 }
10380
Thierry Strudel54dc9782017-02-15 12:12:10 -080010381 int32_t sharpness_range[] = {
10382 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10383 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10384 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10385
10386 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10387 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10388 size = 0;
10389 count = CAM_BINNING_CORRECTION_MODE_MAX;
10390 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10391 for (size_t i = 0; i < count; i++) {
10392 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10393 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10394 gCamCapability[cameraId]->supported_binning_modes[i]);
10395 if (NAME_NOT_FOUND != val) {
10396 avail_binning_modes[size] = (int32_t)val;
10397 size++;
10398 }
10399 }
10400 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10401 avail_binning_modes, size);
10402 }
10403
10404 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10405 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10406 size = 0;
10407 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10408 for (size_t i = 0; i < count; i++) {
10409 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10410 gCamCapability[cameraId]->supported_aec_modes[i]);
10411 if (NAME_NOT_FOUND != val)
10412 available_aec_modes[size++] = val;
10413 }
10414 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10415 available_aec_modes, size);
10416 }
10417
10418 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10419 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10420 size = 0;
10421 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10422 for (size_t i = 0; i < count; i++) {
10423 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10424 gCamCapability[cameraId]->supported_iso_modes[i]);
10425 if (NAME_NOT_FOUND != val)
10426 available_iso_modes[size++] = val;
10427 }
10428 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10429 available_iso_modes, size);
10430 }
10431
10432 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010433 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010434 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10435 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10436 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10437
10438 int32_t available_saturation_range[4];
10439 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10440 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10441 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10442 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10443 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10444 available_saturation_range, 4);
10445
10446 uint8_t is_hdr_values[2];
10447 is_hdr_values[0] = 0;
10448 is_hdr_values[1] = 1;
10449 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10450 is_hdr_values, 2);
10451
10452 float is_hdr_confidence_range[2];
10453 is_hdr_confidence_range[0] = 0.0;
10454 is_hdr_confidence_range[1] = 1.0;
10455 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10456 is_hdr_confidence_range, 2);
10457
Emilian Peev0a972ef2017-03-16 10:25:53 +000010458 size_t eepromLength = strnlen(
10459 reinterpret_cast<const char *>(
10460 gCamCapability[cameraId]->eeprom_version_info),
10461 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10462 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010463 char easelInfo[] = ",E:N";
10464 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10465 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10466 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010467 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10468 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010469 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010470 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10471 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10472 }
10473
Thierry Strudel3d639192016-09-09 11:52:26 -070010474 gStaticMetadata[cameraId] = staticInfo.release();
10475 return rc;
10476}
10477
10478/*===========================================================================
10479 * FUNCTION : makeTable
10480 *
10481 * DESCRIPTION: make a table of sizes
10482 *
10483 * PARAMETERS :
10484 *
10485 *
10486 *==========================================================================*/
10487void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10488 size_t max_size, int32_t *sizeTable)
10489{
10490 size_t j = 0;
10491 if (size > max_size) {
10492 size = max_size;
10493 }
10494 for (size_t i = 0; i < size; i++) {
10495 sizeTable[j] = dimTable[i].width;
10496 sizeTable[j+1] = dimTable[i].height;
10497 j+=2;
10498 }
10499}
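/* Worked example (illustrative input): dimTable = {{4032, 3024}, {1920, 1080}}
 * with size = 2 produces sizeTable = {4032, 3024, 1920, 1080}, i.e. the flat
 * interleaved width/height layout used by the size-list tags published in
 * initStaticMetadata. */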
10500
10501/*===========================================================================
10502 * FUNCTION : makeFPSTable
10503 *
10504 * DESCRIPTION: make a table of fps ranges
10505 *
10506 * PARAMETERS :
10507 *
10508 *==========================================================================*/
10509void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10510 size_t max_size, int32_t *fpsRangesTable)
10511{
10512 size_t j = 0;
10513 if (size > max_size) {
10514 size = max_size;
10515 }
10516 for (size_t i = 0; i < size; i++) {
10517 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10518 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10519 j+=2;
10520 }
10521}
10522
10523/*===========================================================================
10524 * FUNCTION : makeOverridesList
10525 *
10526 * DESCRIPTION: make a list of scene mode overrides
10527 *
10528 * PARAMETERS :
10529 *
10530 *
10531 *==========================================================================*/
10532void QCamera3HardwareInterface::makeOverridesList(
10533 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10534 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10535{
10536    /* The daemon gives a list of overrides for all scene modes.
10537      However, we should send the fwk only the overrides for the scene modes
10538      supported by the framework */
10539 size_t j = 0;
10540 if (size > max_size) {
10541 size = max_size;
10542 }
10543 size_t focus_count = CAM_FOCUS_MODE_MAX;
10544 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10545 focus_count);
10546 for (size_t i = 0; i < size; i++) {
10547 bool supt = false;
10548 size_t index = supported_indexes[i];
10549 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10550 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10551 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10552 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10553 overridesTable[index].awb_mode);
10554 if (NAME_NOT_FOUND != val) {
10555 overridesList[j+1] = (uint8_t)val;
10556 }
10557 uint8_t focus_override = overridesTable[index].af_mode;
10558 for (size_t k = 0; k < focus_count; k++) {
10559 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10560 supt = true;
10561 break;
10562 }
10563 }
10564 if (supt) {
10565 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10566 focus_override);
10567 if (NAME_NOT_FOUND != val) {
10568 overridesList[j+2] = (uint8_t)val;
10569 }
10570 } else {
10571 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10572 }
10573 j+=3;
10574 }
10575}
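/* Worked example (illustrative): for each supported scene mode the output is a
 * (ae_mode, awb_mode, af_mode) triple. On a device with a flash, a backend
 * override of AUTO white balance and CONTINUOUS_PICTURE focus for a given scene
 * mode contributes {AE_MODE_ON_AUTO_FLASH, AWB_MODE_AUTO,
 * AF_MODE_CONTINUOUS_PICTURE} to the ANDROID_CONTROL_SCENE_MODE_OVERRIDES list. */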
10576
10577/*===========================================================================
10578 * FUNCTION : filterJpegSizes
10579 *
10580 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10581 *              the active array resolution can be downscaled to within downscale_factor
10582 *
10583 * PARAMETERS :
10584 *
10585 * RETURN : length of jpegSizes array
10586 *==========================================================================*/
10587
10588size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10589 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10590 uint8_t downscale_factor)
10591{
10592 if (0 == downscale_factor) {
10593 downscale_factor = 1;
10594 }
10595
10596 int32_t min_width = active_array_size.width / downscale_factor;
10597 int32_t min_height = active_array_size.height / downscale_factor;
10598 size_t jpegSizesCnt = 0;
10599 if (processedSizesCnt > maxCount) {
10600 processedSizesCnt = maxCount;
10601 }
10602 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10603 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10604 jpegSizes[jpegSizesCnt] = processedSizes[i];
10605 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10606 jpegSizesCnt += 2;
10607 }
10608 }
10609 return jpegSizesCnt;
10610}
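/* Worked example (illustrative numbers): with an active array of 4032x3024 and
 * downscale_factor = 4, min_width x min_height = 1008x756; a processed list of
 * {4032x3024, 1920x1080, 640x480} therefore yields JPEG sizes
 * {4032x3024, 1920x1080}, and the function returns 4 (two width/height pairs). */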
10611
10612/*===========================================================================
10613 * FUNCTION : computeNoiseModelEntryS
10614 *
10615 * DESCRIPTION: function to map a given sensitivity to the S noise
10616 * model parameters in the DNG noise model.
10617 *
10618 * PARAMETERS : sens : the sensor sensitivity
10619 *
10620 * RETURN     : S (sensor amplification) noise
10621 *
10622 *==========================================================================*/
10623double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10624 double s = gCamCapability[mCameraId]->gradient_S * sens +
10625 gCamCapability[mCameraId]->offset_S;
10626 return ((s < 0.0) ? 0.0 : s);
10627}
10628
10629/*===========================================================================
10630 * FUNCTION : computeNoiseModelEntryO
10631 *
10632 * DESCRIPTION: function to map a given sensitivity to the O noise
10633 * model parameters in the DNG noise model.
10634 *
10635 * PARAMETERS : sens : the sensor sensitivity
10636 *
10637 * RETURN     : O (sensor readout) noise
10638 *
10639 *==========================================================================*/
10640double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10641 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10642 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10643 1.0 : (1.0 * sens / max_analog_sens);
10644 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10645 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10646 return ((o < 0.0) ? 0.0 : o);
10647}
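/* Worked example (illustrative coefficients, not from any real sensor): the DNG
 * noise model approximates per-pixel variance as variance(x) = S * x + O for a
 * normalized signal level x. With gradient_S = 2.0e-06, offset_S = 1.0e-07,
 * gradient_O = 4.0e-12, offset_O = 1.0e-08 and a max analog sensitivity of 800,
 * a request at ISO 400 has digital_gain = 1.0, so
 *     S = 2.0e-06 * 400   + 1.0e-07 ~= 8.0e-04
 *     O = 4.0e-12 * 400^2 + 1.0e-08 ~= 6.5e-07
 * Such (S, O) pairs are what typically populate the per-channel
 * ANDROID_SENSOR_NOISE_PROFILE result entries. */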
10648
10649/*===========================================================================
10650 * FUNCTION : getSensorSensitivity
10651 *
10652 * DESCRIPTION: convert iso_mode to an integer value
10653 *
10654 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10655 *
10656 * RETURN     : sensitivity supported by sensor
10657 *
10658 *==========================================================================*/
10659int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10660{
10661 int32_t sensitivity;
10662
10663 switch (iso_mode) {
10664 case CAM_ISO_MODE_100:
10665 sensitivity = 100;
10666 break;
10667 case CAM_ISO_MODE_200:
10668 sensitivity = 200;
10669 break;
10670 case CAM_ISO_MODE_400:
10671 sensitivity = 400;
10672 break;
10673 case CAM_ISO_MODE_800:
10674 sensitivity = 800;
10675 break;
10676 case CAM_ISO_MODE_1600:
10677 sensitivity = 1600;
10678 break;
10679 default:
10680 sensitivity = -1;
10681 break;
10682 }
10683 return sensitivity;
10684}
10685
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010686int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010687 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010688 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10689 // to connect to Easel.
10690 bool doNotpowerOnEasel =
10691 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10692
10693 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010694 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10695 return OK;
10696 }
10697
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010698 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010699 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010700 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010701 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010702 return res;
10703 }
10704
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010705 EaselManagerClientOpened = true;
10706
10707 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010708 if (res != OK) {
10709 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10710 }
10711
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010712 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010713 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010714
10715 // Expose enableZsl key only when HDR+ mode is enabled.
10716 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010717 }
10718
10719 return OK;
10720}
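/* Usage sketch (illustrative adb commands): the Easel/HDR+ behavior above is
 * gated by system properties, e.g.
 *     adb shell setprop persist.camera.hdrplus.enable 1      # full HDR+ instead of bypass-only
 *     adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
 *     adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel powered off
 * These keys are read only the first time the HDR+ client is initialized in a
 * camera HAL process, so changing them requires restarting the camera HAL
 * process (e.g. cameraserver) for the new values to apply. */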
10721
Thierry Strudel3d639192016-09-09 11:52:26 -070010722/*===========================================================================
10723 * FUNCTION : getCamInfo
10724 *
10725 * DESCRIPTION: query camera capabilities
10726 *
10727 * PARAMETERS :
10728 * @cameraId : camera Id
10729 * @info : camera info struct to be filled in with camera capabilities
10730 *
10731 * RETURN : int type of status
10732 * NO_ERROR -- success
10733 * none-zero failure code
10734 *==========================================================================*/
10735int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10736 struct camera_info *info)
10737{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010738 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010739 int rc = 0;
10740
10741 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010742
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010743 {
10744 Mutex::Autolock l(gHdrPlusClientLock);
10745 rc = initHdrPlusClientLocked();
10746 if (rc != OK) {
10747 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10748 pthread_mutex_unlock(&gCamLock);
10749 return rc;
10750 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010751 }
10752
Thierry Strudel3d639192016-09-09 11:52:26 -070010753 if (NULL == gCamCapability[cameraId]) {
10754 rc = initCapabilities(cameraId);
10755 if (rc < 0) {
10756 pthread_mutex_unlock(&gCamLock);
10757 return rc;
10758 }
10759 }
10760
10761 if (NULL == gStaticMetadata[cameraId]) {
10762 rc = initStaticMetadata(cameraId);
10763 if (rc < 0) {
10764 pthread_mutex_unlock(&gCamLock);
10765 return rc;
10766 }
10767 }
10768
10769 switch(gCamCapability[cameraId]->position) {
10770 case CAM_POSITION_BACK:
10771 case CAM_POSITION_BACK_AUX:
10772 info->facing = CAMERA_FACING_BACK;
10773 break;
10774
10775 case CAM_POSITION_FRONT:
10776 case CAM_POSITION_FRONT_AUX:
10777 info->facing = CAMERA_FACING_FRONT;
10778 break;
10779
10780 default:
10781 LOGE("Unknown position type %d for camera id:%d",
10782 gCamCapability[cameraId]->position, cameraId);
10783 rc = -1;
10784 break;
10785 }
10786
10787
10788 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010789#ifndef USE_HAL_3_3
10790 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10791#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010792 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010793#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010794 info->static_camera_characteristics = gStaticMetadata[cameraId];
10795
10796 //For now assume both cameras can operate independently.
10797 info->conflicting_devices = NULL;
10798 info->conflicting_devices_length = 0;
10799
10800    //resource cost is 100 * MIN(1.0, m/M),
10801    //where m is the throughput required by the maximum stream configuration
10802    //and M is the maximum CPP throughput.
10803 float max_fps = 0.0;
10804 for (uint32_t i = 0;
10805 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10806 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10807 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10808 }
10809 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10810 gCamCapability[cameraId]->active_array_size.width *
10811 gCamCapability[cameraId]->active_array_size.height * max_fps /
10812 gCamCapability[cameraId]->max_pixel_bandwidth;
10813 info->resource_cost = 100 * MIN(1.0, ratio);
10814 LOGI("camera %d resource cost is %d", cameraId,
10815 info->resource_cost);
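    /* Worked example (illustrative numbers): with a 4032x3024 active array,
     * max_fps = 30 and MAX_PROCESSED_STREAMS = 3, the required throughput is
     * m = 3 * 4032 * 3024 * 30 ~= 1.1e9 pixels/s; if max_pixel_bandwidth is
     * 1.2e9 pixels/s the ratio is ~0.91 and the reported resource cost is 91. */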
10816
10817 pthread_mutex_unlock(&gCamLock);
10818 return rc;
10819}
10820
10821/*===========================================================================
10822 * FUNCTION : translateCapabilityToMetadata
10823 *
10824 * DESCRIPTION: translate the capability into camera_metadata_t
10825 *
10826 * PARAMETERS : type of the request
10827 *
10828 *
10829 * RETURN : success: camera_metadata_t*
10830 * failure: NULL
10831 *
10832 *==========================================================================*/
10833camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10834{
10835 if (mDefaultMetadata[type] != NULL) {
10836 return mDefaultMetadata[type];
10837 }
10838 //first time we are handling this request
10839 //fill up the metadata structure using the wrapper class
10840 CameraMetadata settings;
10841 //translate from cam_capability_t to camera_metadata_tag_t
10842 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10843 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10844 int32_t defaultRequestID = 0;
10845 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10846
10847 /* OIS disable */
10848 char ois_prop[PROPERTY_VALUE_MAX];
10849 memset(ois_prop, 0, sizeof(ois_prop));
10850 property_get("persist.camera.ois.disable", ois_prop, "0");
10851 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10852
10853 /* Force video to use OIS */
10854 char videoOisProp[PROPERTY_VALUE_MAX];
10855 memset(videoOisProp, 0, sizeof(videoOisProp));
10856 property_get("persist.camera.ois.video", videoOisProp, "1");
10857 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010858
10859 // Hybrid AE enable/disable
10860 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10861 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10862 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10863 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10864
Thierry Strudel3d639192016-09-09 11:52:26 -070010865 uint8_t controlIntent = 0;
10866 uint8_t focusMode;
10867 uint8_t vsMode;
10868 uint8_t optStabMode;
10869 uint8_t cacMode;
10870 uint8_t edge_mode;
10871 uint8_t noise_red_mode;
10872 uint8_t tonemap_mode;
10873 bool highQualityModeEntryAvailable = FALSE;
10874 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010875 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010876 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10877 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010878 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010879 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010880 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010881
Thierry Strudel3d639192016-09-09 11:52:26 -070010882 switch (type) {
10883 case CAMERA3_TEMPLATE_PREVIEW:
10884 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10885 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10886 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10887 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10888 edge_mode = ANDROID_EDGE_MODE_FAST;
10889 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10890 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10891 break;
10892 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10893 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10894 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10895 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10896 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10897 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10898 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10899 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10900 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10901 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10902 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10903 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10904 highQualityModeEntryAvailable = TRUE;
10905 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10906 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10907 fastModeEntryAvailable = TRUE;
10908 }
10909 }
10910 if (highQualityModeEntryAvailable) {
10911 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10912 } else if (fastModeEntryAvailable) {
10913 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10914 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010915 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10916 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10917 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010918 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010919 break;
10920 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10921 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10922 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10923 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010924 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10925 edge_mode = ANDROID_EDGE_MODE_FAST;
10926 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10927 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10928 if (forceVideoOis)
10929 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10930 break;
10931 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10932 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10933 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10934 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010935 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10936 edge_mode = ANDROID_EDGE_MODE_FAST;
10937 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10938 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10939 if (forceVideoOis)
10940 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10941 break;
10942 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10943 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10944 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10945 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10946 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10947 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10948 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10949 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10950 break;
10951 case CAMERA3_TEMPLATE_MANUAL:
10952 edge_mode = ANDROID_EDGE_MODE_FAST;
10953 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10954 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10955 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10956 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10957 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10958 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10959 break;
10960 default:
10961 edge_mode = ANDROID_EDGE_MODE_FAST;
10962 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10963 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10964 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10965 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10966 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10967 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10968 break;
10969 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010970    // Set CAC to OFF if the underlying device doesn't support it
10971 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10972 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10973 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010974 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10975 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10976 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10977 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10978 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10979 }
10980 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010981 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010982 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010983
10984 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10985 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10986 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10987 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10988 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10989 || ois_disable)
10990 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10991 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010992 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010993
10994 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10995 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10996
10997 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10998 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10999
11000 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11001 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11002
11003 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11004 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11005
11006 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11007 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11008
11009 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11010 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11011
11012 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11013 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11014
11015 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11016 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11017
11018 /*flash*/
11019 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11020 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11021
11022 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11023 settings.update(ANDROID_FLASH_FIRING_POWER,
11024 &flashFiringLevel, 1);
11025
11026 /* lens */
11027 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11028 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11029
11030 if (gCamCapability[mCameraId]->filter_densities_count) {
11031 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11032 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11033 gCamCapability[mCameraId]->filter_densities_count);
11034 }
11035
11036 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11037 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11038
Thierry Strudel3d639192016-09-09 11:52:26 -070011039 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11040 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11041
11042 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11043 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11044
11045 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11046 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11047
11048 /* face detection (default to OFF) */
11049 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11050 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11051
Thierry Strudel54dc9782017-02-15 12:12:10 -080011052 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11053 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011054
11055 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11056 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11057
11058 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11059 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11060
Thierry Strudel3d639192016-09-09 11:52:26 -070011061
11062 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11063 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11064
11065 /* Exposure time(Update the Min Exposure Time)*/
11066 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11067 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11068
11069 /* frame duration */
11070 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11071 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11072
11073 /* sensitivity */
11074 static const int32_t default_sensitivity = 100;
11075 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011076#ifndef USE_HAL_3_3
11077 static const int32_t default_isp_sensitivity =
11078 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11079 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11080#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011081
11082 /*edge mode*/
11083 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11084
11085 /*noise reduction mode*/
11086 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11087
11088 /*color correction mode*/
11089 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11090 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11091
11092 /*transform matrix mode*/
11093 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11094
11095 int32_t scaler_crop_region[4];
11096 scaler_crop_region[0] = 0;
11097 scaler_crop_region[1] = 0;
11098 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11099 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11100 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11101
11102 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11103 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11104
11105 /*focus distance*/
11106 float focus_distance = 0.0;
11107 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11108
11109 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011110 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011111 float max_range = 0.0;
11112 float max_fixed_fps = 0.0;
11113 int32_t fps_range[2] = {0, 0};
11114 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11115 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011116 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11117 TEMPLATE_MAX_PREVIEW_FPS) {
11118 continue;
11119 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011120 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11121 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11122 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11123 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11124 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11125 if (range > max_range) {
11126 fps_range[0] =
11127 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11128 fps_range[1] =
11129 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11130 max_range = range;
11131 }
11132 } else {
11133 if (range < 0.01 && max_fixed_fps <
11134 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11135 fps_range[0] =
11136 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11137 fps_range[1] =
11138 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11139 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11140 }
11141 }
11142 }
11143 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
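    /* Worked example (illustrative table): with fps ranges {[15,30], [30,30],
     * [7.5,30], [30,120]}, the [30,120] entry is skipped because its max exceeds
     * TEMPLATE_MAX_PREVIEW_FPS; the preview/still/ZSL templates pick the widest
     * remaining span [7.5,30], while the recording templates pick the highest
     * fixed range [30,30]. */
    LOGD("default AE target FPS range for template %d: [%d, %d]",
            type, fps_range[0], fps_range[1]);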
11144
11145 /*precapture trigger*/
11146 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11147 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11148
11149 /*af trigger*/
11150 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11151 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11152
11153 /* ae & af regions */
11154 int32_t active_region[] = {
11155 gCamCapability[mCameraId]->active_array_size.left,
11156 gCamCapability[mCameraId]->active_array_size.top,
11157 gCamCapability[mCameraId]->active_array_size.left +
11158 gCamCapability[mCameraId]->active_array_size.width,
11159 gCamCapability[mCameraId]->active_array_size.top +
11160 gCamCapability[mCameraId]->active_array_size.height,
11161 0};
11162 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11163 sizeof(active_region) / sizeof(active_region[0]));
11164 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11165 sizeof(active_region) / sizeof(active_region[0]));
11166
11167 /* black level lock */
11168 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11169 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11170
Thierry Strudel3d639192016-09-09 11:52:26 -070011171 //special defaults for manual template
11172 if (type == CAMERA3_TEMPLATE_MANUAL) {
11173 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11174 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11175
11176 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11177 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11178
11179 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11180 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11181
11182 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11183 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11184
11185 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11186 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11187
11188 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11189 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11190 }
11191
11192
11193 /* TNR
11194      * This is where we decide for which templates TNR is enabled.
11195      * TNR is enabled if either the preview or the video stream requires it.
11196      * This is not to be confused with linking on a per-stream basis; that decision
11197      * is still made per session and is handled as part of configure_streams.
11198 */
11199 uint8_t tnr_enable = 0;
11200
11201 if (m_bTnrPreview || m_bTnrVideo) {
11202
11203 switch (type) {
11204 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11205 tnr_enable = 1;
11206 break;
11207
11208 default:
11209 tnr_enable = 0;
11210 break;
11211 }
11212
11213 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11214 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11215 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11216
11217 LOGD("TNR:%d with process plate %d for template:%d",
11218 tnr_enable, tnr_process_type, type);
11219 }
11220
11221 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011222 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011223 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11224
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011225 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011226 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11227
Shuzhen Wang920ea402017-05-03 08:49:39 -070011228 uint8_t related_camera_id = mCameraId;
11229 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011230
11231 /* CDS default */
11232 char prop[PROPERTY_VALUE_MAX];
11233 memset(prop, 0, sizeof(prop));
11234 property_get("persist.camera.CDS", prop, "Auto");
11235 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11236 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11237 if (CAM_CDS_MODE_MAX == cds_mode) {
11238 cds_mode = CAM_CDS_MODE_AUTO;
11239 }
11240
11241 /* Disabling CDS in templates which have TNR enabled*/
11242 if (tnr_enable)
11243 cds_mode = CAM_CDS_MODE_OFF;
11244
11245 int32_t mode = cds_mode;
11246 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011247
Thierry Strudel269c81a2016-10-12 12:13:59 -070011248 /* Manual Convergence AEC Speed is disabled by default*/
11249 float default_aec_speed = 0;
11250 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11251
11252 /* Manual Convergence AWB Speed is disabled by default*/
11253 float default_awb_speed = 0;
11254 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11255
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011256 // Set instant AEC to normal convergence by default
11257 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11258 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11259
Shuzhen Wang19463d72016-03-08 11:09:52 -080011260 /* hybrid ae */
11261 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11262
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011263 if (gExposeEnableZslKey) {
11264 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11265 }
11266
Thierry Strudel3d639192016-09-09 11:52:26 -070011267 mDefaultMetadata[type] = settings.release();
11268
11269 return mDefaultMetadata[type];
11270}
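/* Context note: these cached templates are handed out through the camera3
 * construct_default_request_settings() entry point, one immutable
 * camera_metadata_t per CAMERA3_TEMPLATE_* type; because mDefaultMetadata[type]
 * is reused, repeated requests for the same template return the same buffer. */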
11271
11272/*===========================================================================
11273 * FUNCTION : setFrameParameters
11274 *
11275 * DESCRIPTION: set parameters per frame as requested in the metadata from
11276 * framework
11277 *
11278 * PARAMETERS :
11279 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011280 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011281 * @blob_request: Whether this request is a blob request or not
11282 *
11283 * RETURN : success: NO_ERROR
11284 * failure:
11285 *==========================================================================*/
11286int QCamera3HardwareInterface::setFrameParameters(
11287 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011288 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011289 int blob_request,
11290 uint32_t snapshotStreamId)
11291{
11292 /*translate from camera_metadata_t type to parm_type_t*/
11293 int rc = 0;
11294 int32_t hal_version = CAM_HAL_V3;
11295
11296 clear_metadata_buffer(mParameters);
11297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11298 LOGE("Failed to set hal version in the parameters");
11299 return BAD_VALUE;
11300 }
11301
11302 /*we need to update the frame number in the parameters*/
11303 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11304 request->frame_number)) {
11305 LOGE("Failed to set the frame number in the parameters");
11306 return BAD_VALUE;
11307 }
11308
11309 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011310 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011311 LOGE("Failed to set stream type mask in the parameters");
11312 return BAD_VALUE;
11313 }
11314
11315 if (mUpdateDebugLevel) {
11316 uint32_t dummyDebugLevel = 0;
11317         /* The value of dummyDebugLevel is irrelevant. On
11318          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11320 dummyDebugLevel)) {
11321 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11322 return BAD_VALUE;
11323 }
11324 mUpdateDebugLevel = false;
11325 }
11326
11327 if(request->settings != NULL){
11328 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11329 if (blob_request)
11330 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11331 }
11332
11333 return rc;
11334}
11335
11336/*===========================================================================
11337 * FUNCTION : setReprocParameters
11338 *
11339 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11340 * return it.
11341 *
11342 * PARAMETERS :
11343 * @request : request that needs to be serviced
11344 *
11345 * RETURN : success: NO_ERROR
11346 * failure:
11347 *==========================================================================*/
11348int32_t QCamera3HardwareInterface::setReprocParameters(
11349 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11350 uint32_t snapshotStreamId)
11351{
11352 /*translate from camera_metadata_t type to parm_type_t*/
11353 int rc = 0;
11354
11355 if (NULL == request->settings){
11356 LOGE("Reprocess settings cannot be NULL");
11357 return BAD_VALUE;
11358 }
11359
11360 if (NULL == reprocParam) {
11361 LOGE("Invalid reprocessing metadata buffer");
11362 return BAD_VALUE;
11363 }
11364 clear_metadata_buffer(reprocParam);
11365
11366 /*we need to update the frame number in the parameters*/
11367 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11368 request->frame_number)) {
11369 LOGE("Failed to set the frame number in the parameters");
11370 return BAD_VALUE;
11371 }
11372
11373 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11374 if (rc < 0) {
11375 LOGE("Failed to translate reproc request");
11376 return rc;
11377 }
11378
11379 CameraMetadata frame_settings;
11380 frame_settings = request->settings;
11381 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11382 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11383 int32_t *crop_count =
11384 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11385 int32_t *crop_data =
11386 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11387 int32_t *roi_map =
11388 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11389 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11390 cam_crop_data_t crop_meta;
11391 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11392 crop_meta.num_of_streams = 1;
11393 crop_meta.crop_info[0].crop.left = crop_data[0];
11394 crop_meta.crop_info[0].crop.top = crop_data[1];
11395 crop_meta.crop_info[0].crop.width = crop_data[2];
11396 crop_meta.crop_info[0].crop.height = crop_data[3];
11397
11398 crop_meta.crop_info[0].roi_map.left =
11399 roi_map[0];
11400 crop_meta.crop_info[0].roi_map.top =
11401 roi_map[1];
11402 crop_meta.crop_info[0].roi_map.width =
11403 roi_map[2];
11404 crop_meta.crop_info[0].roi_map.height =
11405 roi_map[3];
11406
11407 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11408 rc = BAD_VALUE;
11409 }
11410 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11411 request->input_buffer->stream,
11412 crop_meta.crop_info[0].crop.left,
11413 crop_meta.crop_info[0].crop.top,
11414 crop_meta.crop_info[0].crop.width,
11415 crop_meta.crop_info[0].crop.height);
11416 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11417 request->input_buffer->stream,
11418 crop_meta.crop_info[0].roi_map.left,
11419 crop_meta.crop_info[0].roi_map.top,
11420 crop_meta.crop_info[0].roi_map.width,
11421 crop_meta.crop_info[0].roi_map.height);
11422 } else {
11423 LOGE("Invalid reprocess crop count %d!", *crop_count);
11424 }
11425 } else {
11426 LOGE("No crop data from matching output stream");
11427 }
11428
11429 /* These settings are not needed for regular requests so handle them specially for
11430 reprocess requests; information needed for EXIF tags */
11431 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11432 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11433 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11434 if (NAME_NOT_FOUND != val) {
11435 uint32_t flashMode = (uint32_t)val;
11436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11437 rc = BAD_VALUE;
11438 }
11439 } else {
11440 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11441 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11442 }
11443 } else {
11444 LOGH("No flash mode in reprocess settings");
11445 }
11446
11447 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11448 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11450 rc = BAD_VALUE;
11451 }
11452 } else {
11453 LOGH("No flash state in reprocess settings");
11454 }
11455
11456 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11457 uint8_t *reprocessFlags =
11458 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11459 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11460 *reprocessFlags)) {
11461 rc = BAD_VALUE;
11462 }
11463 }
11464
Thierry Strudel54dc9782017-02-15 12:12:10 -080011465 // Add exif debug data to internal metadata
11466 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11467 mm_jpeg_debug_exif_params_t *debug_params =
11468 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11469 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11470 // AE
11471 if (debug_params->ae_debug_params_valid == TRUE) {
11472 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11473 debug_params->ae_debug_params);
11474 }
11475 // AWB
11476 if (debug_params->awb_debug_params_valid == TRUE) {
11477 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11478 debug_params->awb_debug_params);
11479 }
11480 // AF
11481 if (debug_params->af_debug_params_valid == TRUE) {
11482 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11483 debug_params->af_debug_params);
11484 }
11485 // ASD
11486 if (debug_params->asd_debug_params_valid == TRUE) {
11487 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11488 debug_params->asd_debug_params);
11489 }
11490 // Stats
11491 if (debug_params->stats_debug_params_valid == TRUE) {
11492 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11493 debug_params->stats_debug_params);
11494 }
11495 // BE Stats
11496 if (debug_params->bestats_debug_params_valid == TRUE) {
11497 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11498 debug_params->bestats_debug_params);
11499 }
11500 // BHIST
11501 if (debug_params->bhist_debug_params_valid == TRUE) {
11502 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11503 debug_params->bhist_debug_params);
11504 }
11505 // 3A Tuning
11506 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11507 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11508 debug_params->q3a_tuning_debug_params);
11509 }
11510 }
11511
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011512 // Add metadata which reprocess needs
11513 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11514 cam_reprocess_info_t *repro_info =
11515 (cam_reprocess_info_t *)frame_settings.find
11516 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011517 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011518 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011519 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011520 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011521 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011522 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011523 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011524 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011526 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011527 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011528 repro_info->pipeline_flip);
11529 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11530 repro_info->af_roi);
11531 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11532 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011533        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11534            CAM_INTF_PARM_ROTATION metadata has already been added in
11535            translateToHalMetadata. HAL needs to keep this new rotation
11536            metadata; otherwise, the old rotation info saved in the vendor tag
11537            would be used */
11538 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11539 CAM_INTF_PARM_ROTATION, reprocParam) {
11540 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11541 } else {
11542 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011543 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011544 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011545 }
11546
11547    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11548       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding;
11549       roi.width and roi.height become the final JPEG size.
11550       For now, HAL only checks this for reprocess requests */
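    /* Illustrative example (app-side settings, values are assumptions): to crop
     * the snapshot to a centered region and encode it as a 1920x1440 JPEG, the
     * reprocess request would carry
     *     QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1
     *     QCAMERA3_JPEG_ENCODE_CROP_RECT   = {left, top, width, height} of the crop
     *     QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1920, 1440}
     * so that roi.width/roi.height become the output JPEG dimensions. */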
11551 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11552 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11553 uint8_t *enable =
11554 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11555 if (*enable == TRUE) {
11556 int32_t *crop_data =
11557 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11558 cam_stream_crop_info_t crop_meta;
11559 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11560 crop_meta.stream_id = 0;
11561 crop_meta.crop.left = crop_data[0];
11562 crop_meta.crop.top = crop_data[1];
11563 crop_meta.crop.width = crop_data[2];
11564 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011565 // The JPEG crop roi should match cpp output size
11566 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11567 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11568 crop_meta.roi_map.left = 0;
11569 crop_meta.roi_map.top = 0;
11570 crop_meta.roi_map.width = cpp_crop->crop.width;
11571 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011572 }
11573 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11574 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011575 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011576 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011577 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11578 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011579 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011580 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11581
11582 // Add JPEG scale information
11583 cam_dimension_t scale_dim;
11584 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11585 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11586 int32_t *roi =
11587 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11588 scale_dim.width = roi[2];
11589 scale_dim.height = roi[3];
11590 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11591 scale_dim);
11592 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11593 scale_dim.width, scale_dim.height, mCameraId);
11594 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011595 }
11596 }
11597
11598 return rc;
11599}
11600
11601/*===========================================================================
11602 * FUNCTION : saveRequestSettings
11603 *
11604 * DESCRIPTION: Add any settings that might have changed to the request settings
11605 * and save the settings to be applied on the frame
11606 *
11607 * PARAMETERS :
11608 * @jpegMetadata : the extracted and/or modified jpeg metadata
11609 * @request : request with initial settings
11610 *
11611 * RETURN :
11612 * camera_metadata_t* : pointer to the saved request settings
11613 *==========================================================================*/
11614camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11615 const CameraMetadata &jpegMetadata,
11616 camera3_capture_request_t *request)
11617{
11618 camera_metadata_t *resultMetadata;
11619 CameraMetadata camMetadata;
11620 camMetadata = request->settings;
11621
11622 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11623 int32_t thumbnail_size[2];
11624 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11625 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11626 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11627 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11628 }
11629
11630 if (request->input_buffer != NULL) {
11631 uint8_t reprocessFlags = 1;
11632 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11633 (uint8_t*)&reprocessFlags,
11634 sizeof(reprocessFlags));
11635 }
11636
11637 resultMetadata = camMetadata.release();
11638 return resultMetadata;
11639}
11640
11641/*===========================================================================
11642 * FUNCTION : setHalFpsRange
11643 *
11644 * DESCRIPTION: set FPS range parameter
11645 *
11646 *
11647 * PARAMETERS :
11648 * @settings : Metadata from framework
11649 * @hal_metadata: Metadata buffer
11650 *
11651 *
11652 * RETURN : success: NO_ERROR
11653 * failure:
11654 *==========================================================================*/
11655int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11656 metadata_buffer_t *hal_metadata)
11657{
11658 int32_t rc = NO_ERROR;
11659 cam_fps_range_t fps_range;
11660 fps_range.min_fps = (float)
11661 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11662 fps_range.max_fps = (float)
11663 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11664 fps_range.video_min_fps = fps_range.min_fps;
11665 fps_range.video_max_fps = fps_range.max_fps;
11666
11667 LOGD("aeTargetFpsRange fps: [%f %f]",
11668 fps_range.min_fps, fps_range.max_fps);
11669 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11670 * follows:
11671 * ---------------------------------------------------------------|
11672 * Video stream is absent in configure_streams |
11673 * (Camcorder preview before the first video record |
11674 * ---------------------------------------------------------------|
11675 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11676 * | | | vid_min/max_fps|
11677 * ---------------------------------------------------------------|
11678 * NO | [ 30, 240] | 240 | [240, 240] |
11679 * |-------------|-------------|----------------|
11680 * | [240, 240] | 240 | [240, 240] |
11681 * ---------------------------------------------------------------|
11682 * Video stream is present in configure_streams |
11683 * ---------------------------------------------------------------|
11684 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11685 * | | | vid_min/max_fps|
11686 * ---------------------------------------------------------------|
11687 * NO | [ 30, 240] | 240 | [240, 240] |
11688 * (camcorder prev |-------------|-------------|----------------|
11689 * after video rec | [240, 240] | 240 | [240, 240] |
11690 * is stopped) | | | |
11691 * ---------------------------------------------------------------|
11692 * YES | [ 30, 240] | 240 | [240, 240] |
11693 * |-------------|-------------|----------------|
11694 * | [240, 240] | 240 | [240, 240] |
11695 * ---------------------------------------------------------------|
11696 * When Video stream is absent in configure_streams,
11697 * preview fps = sensor_fps / batchsize
11698 * Eg: for 240fps at batchSize 4, preview = 60fps
11699 * for 120fps at batchSize 4, preview = 30fps
11700 *
11701 * When video stream is present in configure_streams, preview fps is as per
11702 * the ratio of preview buffers to video buffers requested in process
11703 * capture request
11704 */
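    /* Illustrative batch-size math (assuming PREVIEW_FPS_FOR_HFR is 30 and
     * MAX_HFR_BATCH_SIZE is 4, both defined elsewhere): a [240, 240]
     * aeTargetFpsRange gives mBatchSize = 240 / 30 = 8, clamped to 4 below,
     * which yields the 240 / 4 = 60 fps preview quoted in the example above. */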
11705 mBatchSize = 0;
11706 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11707 fps_range.min_fps = fps_range.video_max_fps;
11708 fps_range.video_min_fps = fps_range.video_max_fps;
11709 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11710 fps_range.max_fps);
11711 if (NAME_NOT_FOUND != val) {
11712 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11713 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11714 return BAD_VALUE;
11715 }
11716
11717 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11718 /* If batchmode is currently in progress and the fps changes,
11719 * set the flag to restart the sensor */
11720 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11721 (mHFRVideoFps != fps_range.max_fps)) {
11722 mNeedSensorRestart = true;
11723 }
11724 mHFRVideoFps = fps_range.max_fps;
11725 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11726 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11727 mBatchSize = MAX_HFR_BATCH_SIZE;
11728 }
11729 }
11730 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11731
11732 }
11733 } else {
11734     /* HFR mode is a session parameter in the backend/ISP. It should be reset
11735      * when not in HFR mode */
11736 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11737 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11738 return BAD_VALUE;
11739 }
11740 }
11741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11742 return BAD_VALUE;
11743 }
11744 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11745 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11746 return rc;
11747}
11748
11749/*===========================================================================
11750 * FUNCTION : translateToHalMetadata
11751 *
11752 * DESCRIPTION: read from the camera_metadata_t and convert to parm_type_t
11753 *
11754 *
11755 * PARAMETERS :
11756 * @request : request sent from framework
11757 *
11758 *
11759 * RETURN : success: NO_ERROR
11760 * failure:
11761 *==========================================================================*/
11762int QCamera3HardwareInterface::translateToHalMetadata
11763 (const camera3_capture_request_t *request,
11764 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011765 uint32_t snapshotStreamId) {
11766 if (request == nullptr || hal_metadata == nullptr) {
11767 return BAD_VALUE;
11768 }
11769
11770 int64_t minFrameDuration = getMinFrameDuration(request);
11771
11772 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11773 minFrameDuration);
11774}
11775
11776int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11777 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11778 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11779
Thierry Strudel3d639192016-09-09 11:52:26 -070011780 int rc = 0;
11781 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011782 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011783
11784 /* Do not change the order of the following list unless you know what you are
11785 * doing.
11786 * The order is laid out in such a way that parameters in the front of the table
11787 * may be used to override the parameters later in the table. Examples are:
11788 * 1. META_MODE should precede AEC/AWB/AF MODE
11789     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11790     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11791     * 4. Any mode should precede its corresponding settings
11792 */
11793 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11794 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11795 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11796 rc = BAD_VALUE;
11797 }
11798 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11799 if (rc != NO_ERROR) {
11800 LOGE("extractSceneMode failed");
11801 }
11802 }
11803
11804 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11805 uint8_t fwk_aeMode =
11806 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11807 uint8_t aeMode;
11808 int32_t redeye;
11809
11810 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11811 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011812 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11813 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011814 } else {
11815 aeMode = CAM_AE_MODE_ON;
11816 }
11817 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11818 redeye = 1;
11819 } else {
11820 redeye = 0;
11821 }
11822
11823 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11824 fwk_aeMode);
11825 if (NAME_NOT_FOUND != val) {
11826 int32_t flashMode = (int32_t)val;
11827 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11828 }
11829
11830 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11831 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11832 rc = BAD_VALUE;
11833 }
11834 }
11835
11836 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11837 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11838 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11839 fwk_whiteLevel);
11840 if (NAME_NOT_FOUND != val) {
11841 uint8_t whiteLevel = (uint8_t)val;
11842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11843 rc = BAD_VALUE;
11844 }
11845 }
11846 }
11847
11848 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11849 uint8_t fwk_cacMode =
11850 frame_settings.find(
11851 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11852 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11853 fwk_cacMode);
11854 if (NAME_NOT_FOUND != val) {
11855 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11856 bool entryAvailable = FALSE;
11857         // Check whether the framework-requested CAC mode is supported on the device
11858 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11859 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11860 entryAvailable = TRUE;
11861 break;
11862 }
11863 }
11864 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11865         // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
11866 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11867 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11868 if (entryAvailable == FALSE) {
11869 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11870 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11871 } else {
11872 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11873                     // High is not supported, so set FAST since the spec says the underlying
11874                     // device implementation can be the same for both modes.
11875 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11876 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11877                     // Fast is not supported, so neither HIGH nor FAST can be set; choose OFF
11878                     // to avoid the fps drop that high quality would cause
11879 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11880 } else {
11881 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11882 }
11883 }
11884 }
11885 LOGD("Final cacMode is %d", cacMode);
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11887 rc = BAD_VALUE;
11888 }
11889 } else {
11890 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11891 }
11892 }
11893
Thierry Strudel2896d122017-02-23 19:18:03 -080011894 char af_value[PROPERTY_VALUE_MAX];
11895 property_get("persist.camera.af.infinity", af_value, "0");
11896
Jason Lee84ae9972017-02-24 13:24:24 -080011897 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011898 if (atoi(af_value) == 0) {
11899 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011900 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011901 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11902 fwk_focusMode);
11903 if (NAME_NOT_FOUND != val) {
11904 uint8_t focusMode = (uint8_t)val;
11905 LOGD("set focus mode %d", focusMode);
11906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11907 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11908 rc = BAD_VALUE;
11909 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011910 }
11911 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011912 } else {
11913 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11914 LOGE("Focus forced to infinity %d", focusMode);
11915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11916 rc = BAD_VALUE;
11917 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011918 }
11919
Jason Lee84ae9972017-02-24 13:24:24 -080011920 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11921 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011922 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11923 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11924 focalDistance)) {
11925 rc = BAD_VALUE;
11926 }
11927 }
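    // Note: ANDROID_LENS_FOCUS_DISTANCE is expressed in diopters (1/meters), so
    // 0.0f requests infinity focus; it is forwarded only when the AF mode is OFF,
    // i.e. when the app is doing manual focus.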
11928
11929 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11930 uint8_t fwk_antibandingMode =
11931 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11932 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11933 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11934 if (NAME_NOT_FOUND != val) {
11935 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011936 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11937 if (m60HzZone) {
11938 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11939 } else {
11940 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11941 }
11942 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11944 hal_antibandingMode)) {
11945 rc = BAD_VALUE;
11946 }
11947 }
11948 }
11949
11950 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11951 int32_t expCompensation = frame_settings.find(
11952 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11953 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11954 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11955 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11956 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011957 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011958 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11959 expCompensation)) {
11960 rc = BAD_VALUE;
11961 }
11962 }
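    // Illustrative clamp (hypothetical capability values): with an exposure
    // compensation range of [-12, 12] and a step of 1/6 EV, a framework request
    // of -20 is clamped to -12, i.e. -2 EV.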
11963
11964 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11965 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11966 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11967 rc = BAD_VALUE;
11968 }
11969 }
11970 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11971 rc = setHalFpsRange(frame_settings, hal_metadata);
11972 if (rc != NO_ERROR) {
11973 LOGE("setHalFpsRange failed");
11974 }
11975 }
11976
11977 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11978 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11979 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11980 rc = BAD_VALUE;
11981 }
11982 }
11983
11984 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11985 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11986 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11987 fwk_effectMode);
11988 if (NAME_NOT_FOUND != val) {
11989 uint8_t effectMode = (uint8_t)val;
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11991 rc = BAD_VALUE;
11992 }
11993 }
11994 }
11995
11996 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11997 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11999 colorCorrectMode)) {
12000 rc = BAD_VALUE;
12001 }
12002 }
12003
12004 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12005 cam_color_correct_gains_t colorCorrectGains;
12006 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12007 colorCorrectGains.gains[i] =
12008 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12009 }
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12011 colorCorrectGains)) {
12012 rc = BAD_VALUE;
12013 }
12014 }
12015
12016 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12017 cam_color_correct_matrix_t colorCorrectTransform;
12018 cam_rational_type_t transform_elem;
12019 size_t num = 0;
12020 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12021 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12022 transform_elem.numerator =
12023 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12024 transform_elem.denominator =
12025 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12026 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12027 num++;
12028 }
12029 }
12030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12031 colorCorrectTransform)) {
12032 rc = BAD_VALUE;
12033 }
12034 }
12035
12036 cam_trigger_t aecTrigger;
12037 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12038 aecTrigger.trigger_id = -1;
12039 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12040 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12041 aecTrigger.trigger =
12042 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12043 aecTrigger.trigger_id =
12044 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12045 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12046 aecTrigger)) {
12047 rc = BAD_VALUE;
12048 }
12049 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12050 aecTrigger.trigger, aecTrigger.trigger_id);
12051 }
12052
12053 /*af_trigger must come with a trigger id*/
12054 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12055 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12056 cam_trigger_t af_trigger;
12057 af_trigger.trigger =
12058 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12059 af_trigger.trigger_id =
12060 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12061 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12062 rc = BAD_VALUE;
12063 }
12064 LOGD("AfTrigger: %d AfTriggerID: %d",
12065 af_trigger.trigger, af_trigger.trigger_id);
12066 }
12067
12068 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12069 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12071 rc = BAD_VALUE;
12072 }
12073 }
12074 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12075 cam_edge_application_t edge_application;
12076 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012077
Thierry Strudel3d639192016-09-09 11:52:26 -070012078 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12079 edge_application.sharpness = 0;
12080 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012081 edge_application.sharpness =
12082 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12083 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12084 int32_t sharpness =
12085 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12086 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12087 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12088 LOGD("Setting edge mode sharpness %d", sharpness);
12089 edge_application.sharpness = sharpness;
12090 }
12091 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012092 }
12093 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12094 rc = BAD_VALUE;
12095 }
12096 }
12097
12098 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12099 int32_t respectFlashMode = 1;
12100 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12101 uint8_t fwk_aeMode =
12102 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012103 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12104 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12105 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012106 respectFlashMode = 0;
12107 LOGH("AE Mode controls flash, ignore android.flash.mode");
12108 }
12109 }
12110 if (respectFlashMode) {
12111 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12112 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12113 LOGH("flash mode after mapping %d", val);
12114 // To check: CAM_INTF_META_FLASH_MODE usage
12115 if (NAME_NOT_FOUND != val) {
12116 uint8_t flashMode = (uint8_t)val;
12117 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12118 rc = BAD_VALUE;
12119 }
12120 }
12121 }
12122 }
12123
12124 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12125 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12127 rc = BAD_VALUE;
12128 }
12129 }
12130
12131 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12132 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12134 flashFiringTime)) {
12135 rc = BAD_VALUE;
12136 }
12137 }
12138
12139 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12140 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12141 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12142 hotPixelMode)) {
12143 rc = BAD_VALUE;
12144 }
12145 }
12146
12147 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12148 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12150 lensAperture)) {
12151 rc = BAD_VALUE;
12152 }
12153 }
12154
12155 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12156 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12158 filterDensity)) {
12159 rc = BAD_VALUE;
12160 }
12161 }
12162
12163 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12164 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12166 focalLength)) {
12167 rc = BAD_VALUE;
12168 }
12169 }
12170
12171 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12172 uint8_t optStabMode =
12173 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12175 optStabMode)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179
12180 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12181 uint8_t videoStabMode =
12182 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12183 LOGD("videoStabMode from APP = %d", videoStabMode);
12184 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12185 videoStabMode)) {
12186 rc = BAD_VALUE;
12187 }
12188 }
12189
12190
12191 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12192 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12194 noiseRedMode)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12200 float reprocessEffectiveExposureFactor =
12201 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12203 reprocessEffectiveExposureFactor)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 cam_crop_region_t scalerCropRegion;
12209 bool scalerCropSet = false;
12210 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12211 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12212 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12213 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12214 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12215
12216 // Map coordinate system from active array to sensor output.
12217 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12218 scalerCropRegion.width, scalerCropRegion.height);
12219
12220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12221 scalerCropRegion)) {
12222 rc = BAD_VALUE;
12223 }
12224 scalerCropSet = true;
12225 }
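    // Note: ANDROID_SCALER_CROP_REGION arrives in active-array coordinates and
    // toSensor() above rescales it in place to the current sensor output. For
    // example (hypothetical sizes), a {0, 0, 4032, 3024} crop on a 4032x3024
    // active array becomes {0, 0, 2016, 1512} when the sensor streams a
    // 2x2-binned 2016x1512 mode.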
12226
12227 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12228 int64_t sensorExpTime =
12229 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12230 LOGD("setting sensorExpTime %lld", sensorExpTime);
12231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12232 sensorExpTime)) {
12233 rc = BAD_VALUE;
12234 }
12235 }
12236
12237 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12238 int64_t sensorFrameDuration =
12239 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012240 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12241 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12242 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12243 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12244 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12245 sensorFrameDuration)) {
12246 rc = BAD_VALUE;
12247 }
12248 }
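    // Illustrative clamp: if the configured streams impose a 33333333 ns (30 fps)
    // minimum frame duration, a requested 16666666 ns (60 fps) duration is raised
    // to 33333333 ns; requests longer than max_frame_duration are capped to it.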
12249
12250 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12251 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12252 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12253 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12254 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12255 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12256 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12258 sensorSensitivity)) {
12259 rc = BAD_VALUE;
12260 }
12261 }
12262
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012263#ifndef USE_HAL_3_3
12264 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12265 int32_t ispSensitivity =
12266 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12267 if (ispSensitivity <
12268 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12269 ispSensitivity =
12270 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12271 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12272 }
12273 if (ispSensitivity >
12274 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12275 ispSensitivity =
12276 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12277 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12278 }
12279 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12280 ispSensitivity)) {
12281 rc = BAD_VALUE;
12282 }
12283 }
12284#endif
12285
Thierry Strudel3d639192016-09-09 11:52:26 -070012286 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12287 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12289 rc = BAD_VALUE;
12290 }
12291 }
12292
12293 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12294 uint8_t fwk_facedetectMode =
12295 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12296
12297 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12298 fwk_facedetectMode);
12299
12300 if (NAME_NOT_FOUND != val) {
12301 uint8_t facedetectMode = (uint8_t)val;
12302 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12303 facedetectMode)) {
12304 rc = BAD_VALUE;
12305 }
12306 }
12307 }
12308
Thierry Strudel54dc9782017-02-15 12:12:10 -080012309 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012310 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012311 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12313 histogramMode)) {
12314 rc = BAD_VALUE;
12315 }
12316 }
12317
12318 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12319 uint8_t sharpnessMapMode =
12320 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12322 sharpnessMapMode)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12328 uint8_t tonemapMode =
12329 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12331 rc = BAD_VALUE;
12332 }
12333 }
12334 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12335 /*All tonemap channels will have the same number of points*/
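    /* Layout reminder: each ANDROID_TONEMAP_CURVE_* entry is a flat float array of
     * interleaved (Pin, Pout) pairs, hence tonemap_points_cnt = count / 2 below.
     * For example, a two-point linear curve for one channel is {0.0, 0.0, 1.0, 1.0}. */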
12336 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12337 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12338 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12339 cam_rgb_tonemap_curves tonemapCurves;
12340 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12341 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12342 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12343 tonemapCurves.tonemap_points_cnt,
12344 CAM_MAX_TONEMAP_CURVE_SIZE);
12345 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12346 }
12347
12348 /* ch0 = G*/
12349 size_t point = 0;
12350 cam_tonemap_curve_t tonemapCurveGreen;
12351 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12352 for (size_t j = 0; j < 2; j++) {
12353 tonemapCurveGreen.tonemap_points[i][j] =
12354 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12355 point++;
12356 }
12357 }
12358 tonemapCurves.curves[0] = tonemapCurveGreen;
12359
12360 /* ch 1 = B */
12361 point = 0;
12362 cam_tonemap_curve_t tonemapCurveBlue;
12363 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12364 for (size_t j = 0; j < 2; j++) {
12365 tonemapCurveBlue.tonemap_points[i][j] =
12366 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12367 point++;
12368 }
12369 }
12370 tonemapCurves.curves[1] = tonemapCurveBlue;
12371
12372 /* ch 2 = R */
12373 point = 0;
12374 cam_tonemap_curve_t tonemapCurveRed;
12375 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12376 for (size_t j = 0; j < 2; j++) {
12377 tonemapCurveRed.tonemap_points[i][j] =
12378 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12379 point++;
12380 }
12381 }
12382 tonemapCurves.curves[2] = tonemapCurveRed;
12383
12384 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12385 tonemapCurves)) {
12386 rc = BAD_VALUE;
12387 }
12388 }
12389
12390 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12391 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12392 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12393 captureIntent)) {
12394 rc = BAD_VALUE;
12395 }
12396 }
12397
12398 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12399 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12401 blackLevelLock)) {
12402 rc = BAD_VALUE;
12403 }
12404 }
12405
12406 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12407 uint8_t lensShadingMapMode =
12408 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12410 lensShadingMapMode)) {
12411 rc = BAD_VALUE;
12412 }
12413 }
12414
12415 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12416 cam_area_t roi;
12417 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012418 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012419
12420 // Map coordinate system from active array to sensor output.
12421 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12422 roi.rect.height);
12423
12424 if (scalerCropSet) {
12425 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12426 }
12427 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12428 rc = BAD_VALUE;
12429 }
12430 }
12431
12432 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12433 cam_area_t roi;
12434 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012435 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012436
12437 // Map coordinate system from active array to sensor output.
12438 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12439 roi.rect.height);
12440
12441 if (scalerCropSet) {
12442 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12443 }
12444 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12445 rc = BAD_VALUE;
12446 }
12447 }
12448
12449 // CDS for non-HFR non-video mode
12450 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12451 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12452 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12453 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12454 LOGE("Invalid CDS mode %d!", *fwk_cds);
12455 } else {
12456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12457 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12458 rc = BAD_VALUE;
12459 }
12460 }
12461 }
12462
Thierry Strudel04e026f2016-10-10 11:27:36 -070012463 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012464 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012465 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012466 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12467 }
12468 if (m_bVideoHdrEnabled)
12469 vhdr = CAM_VIDEO_HDR_MODE_ON;
12470
Thierry Strudel54dc9782017-02-15 12:12:10 -080012471 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12472
12473 if(vhdr != curr_hdr_state)
12474 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12475
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012476 rc = setVideoHdrMode(mParameters, vhdr);
12477 if (rc != NO_ERROR) {
12478 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012479 }
12480
12481 //IR
12482 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12483 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12484 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012485 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12486 uint8_t isIRon = 0;
12487
12488         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012489 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12490 LOGE("Invalid IR mode %d!", fwk_ir);
12491 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012492 if(isIRon != curr_ir_state )
12493 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12494
Thierry Strudel04e026f2016-10-10 11:27:36 -070012495 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12496 CAM_INTF_META_IR_MODE, fwk_ir)) {
12497 rc = BAD_VALUE;
12498 }
12499 }
12500 }
12501
Thierry Strudel54dc9782017-02-15 12:12:10 -080012502 //Binning Correction Mode
12503 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12504 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12505 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12506 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12507 || (0 > fwk_binning_correction)) {
12508 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12509 } else {
12510 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12511 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12512 rc = BAD_VALUE;
12513 }
12514 }
12515 }
12516
Thierry Strudel269c81a2016-10-12 12:13:59 -070012517 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12518 float aec_speed;
12519 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12520 LOGD("AEC Speed :%f", aec_speed);
12521 if ( aec_speed < 0 ) {
12522 LOGE("Invalid AEC mode %f!", aec_speed);
12523 } else {
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12525 aec_speed)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529 }
12530
12531 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12532 float awb_speed;
12533 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12534 LOGD("AWB Speed :%f", awb_speed);
12535 if ( awb_speed < 0 ) {
12536 LOGE("Invalid AWB mode %f!", awb_speed);
12537 } else {
12538 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12539 awb_speed)) {
12540 rc = BAD_VALUE;
12541 }
12542 }
12543 }
12544
Thierry Strudel3d639192016-09-09 11:52:26 -070012545 // TNR
12546 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12547 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12548 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012549 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012550 cam_denoise_param_t tnr;
12551 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12552 tnr.process_plates =
12553 (cam_denoise_process_type_t)frame_settings.find(
12554 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12555 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012556
12557 if(b_TnrRequested != curr_tnr_state)
12558 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12559
Thierry Strudel3d639192016-09-09 11:52:26 -070012560 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12561 rc = BAD_VALUE;
12562 }
12563 }
12564
Thierry Strudel54dc9782017-02-15 12:12:10 -080012565 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012566 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012567 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12569 *exposure_metering_mode)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573
Thierry Strudel3d639192016-09-09 11:52:26 -070012574 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12575 int32_t fwk_testPatternMode =
12576 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12577 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12578 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12579
12580 if (NAME_NOT_FOUND != testPatternMode) {
12581 cam_test_pattern_data_t testPatternData;
12582 memset(&testPatternData, 0, sizeof(testPatternData));
12583 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12584 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12585 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12586 int32_t *fwk_testPatternData =
12587 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12588 testPatternData.r = fwk_testPatternData[0];
12589 testPatternData.b = fwk_testPatternData[3];
12590 switch (gCamCapability[mCameraId]->color_arrangement) {
12591 case CAM_FILTER_ARRANGEMENT_RGGB:
12592 case CAM_FILTER_ARRANGEMENT_GRBG:
12593 testPatternData.gr = fwk_testPatternData[1];
12594 testPatternData.gb = fwk_testPatternData[2];
12595 break;
12596 case CAM_FILTER_ARRANGEMENT_GBRG:
12597 case CAM_FILTER_ARRANGEMENT_BGGR:
12598 testPatternData.gr = fwk_testPatternData[2];
12599 testPatternData.gb = fwk_testPatternData[1];
12600 break;
12601 default:
12602 LOGE("color arrangement %d is not supported",
12603 gCamCapability[mCameraId]->color_arrangement);
12604 break;
12605 }
12606 }
12607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12608 testPatternData)) {
12609 rc = BAD_VALUE;
12610 }
12611 } else {
12612 LOGE("Invalid framework sensor test pattern mode %d",
12613 fwk_testPatternMode);
12614 }
12615 }
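    // Note on the SOLID_COLOR mapping above: the HAL reads fwk_testPatternData as
    // [R, Gr, Gb, B] for RGGB/GRBG sensors and swaps the two green samples for
    // GBRG/BGGR color filter arrangements so they land on the correct physical
    // green channels.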
12616
12617 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12618 size_t count = 0;
12619 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12620 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12621 gps_coords.data.d, gps_coords.count, count);
12622 if (gps_coords.count != count) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
12627 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12628 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12629 size_t count = 0;
12630 const char *gps_methods_src = (const char *)
12631 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12632 memset(gps_methods, '\0', sizeof(gps_methods));
12633 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12634 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12635 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12636 if (GPS_PROCESSING_METHOD_SIZE != count) {
12637 rc = BAD_VALUE;
12638 }
12639 }
12640
12641 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12642 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12644 gps_timestamp)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648
12649 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12650 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12651 cam_rotation_info_t rotation_info;
12652 if (orientation == 0) {
12653 rotation_info.rotation = ROTATE_0;
12654 } else if (orientation == 90) {
12655 rotation_info.rotation = ROTATE_90;
12656 } else if (orientation == 180) {
12657 rotation_info.rotation = ROTATE_180;
12658 } else if (orientation == 270) {
12659 rotation_info.rotation = ROTATE_270;
12660 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012661 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012662 rotation_info.streamId = snapshotStreamId;
12663 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12665 rc = BAD_VALUE;
12666 }
12667 }
12668
12669 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12670 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12672 rc = BAD_VALUE;
12673 }
12674 }
12675
12676 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12677 uint32_t thumb_quality = (uint32_t)
12678 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12680 thumb_quality)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
12685 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12686 cam_dimension_t dim;
12687 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12688 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693
12694 // Internal metadata
12695 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12696 size_t count = 0;
12697 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12698 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12699 privatedata.data.i32, privatedata.count, count);
12700 if (privatedata.count != count) {
12701 rc = BAD_VALUE;
12702 }
12703 }
12704
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012705 // ISO/Exposure Priority
12706 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12707 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12708 cam_priority_mode_t mode =
12709 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12710 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12711 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12712 use_iso_exp_pty.previewOnly = FALSE;
12713 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12714 use_iso_exp_pty.value = *ptr;
12715
12716 if(CAM_ISO_PRIORITY == mode) {
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12718 use_iso_exp_pty)) {
12719 rc = BAD_VALUE;
12720 }
12721 }
12722 else {
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12724 use_iso_exp_pty)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012728
12729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12730 rc = BAD_VALUE;
12731 }
12732 }
12733 } else {
12734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12735 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012736 }
12737 }
12738
12739 // Saturation
12740 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12741 int32_t* use_saturation =
12742 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12744 rc = BAD_VALUE;
12745 }
12746 }
12747
Thierry Strudel3d639192016-09-09 11:52:26 -070012748 // EV step
12749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12750 gCamCapability[mCameraId]->exp_compensation_step)) {
12751 rc = BAD_VALUE;
12752 }
12753
12754 // CDS info
12755 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12756 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12757 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12758
12759 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12760 CAM_INTF_META_CDS_DATA, *cdsData)) {
12761 rc = BAD_VALUE;
12762 }
12763 }
12764
Shuzhen Wang19463d72016-03-08 11:09:52 -080012765 // Hybrid AE
12766 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12767 uint8_t *hybrid_ae = (uint8_t *)
12768 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12769
12770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12771 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12772 rc = BAD_VALUE;
12773 }
12774 }
12775
Shuzhen Wang14415f52016-11-16 18:26:18 -080012776 // Histogram
12777 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12778 uint8_t histogramMode =
12779 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12781 histogramMode)) {
12782 rc = BAD_VALUE;
12783 }
12784 }
12785
12786 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12787 int32_t histogramBins =
12788 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12790 histogramBins)) {
12791 rc = BAD_VALUE;
12792 }
12793 }
12794
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012795 // Tracking AF
12796 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12797 uint8_t trackingAfTrigger =
12798 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12800 trackingAfTrigger)) {
12801 rc = BAD_VALUE;
12802 }
12803 }
12804
Thierry Strudel3d639192016-09-09 11:52:26 -070012805 return rc;
12806}
12807
12808/*===========================================================================
12809 * FUNCTION : captureResultCb
12810 *
12811 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12812 *
12813 * PARAMETERS :
12814 * @frame : frame information from mm-camera-interface
12815 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12816 * @userdata: userdata
12817 *
12818 * RETURN : NONE
12819 *==========================================================================*/
12820void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12821 camera3_stream_buffer_t *buffer,
12822 uint32_t frame_number, bool isInputBuffer, void *userdata)
12823{
12824 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12825 if (hw == NULL) {
12826 LOGE("Invalid hw %p", hw);
12827 return;
12828 }
12829
12830 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12831 return;
12832}
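// The static wrapper above lets C-style callers invoke the handler: userdata
// carries the QCamera3HardwareInterface instance, so the call is forwarded to
// the member-function overload of captureResultCb().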
12833
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012834/*===========================================================================
12835 * FUNCTION : setBufferErrorStatus
12836 *
12837 * DESCRIPTION: Callback handler for channels to report any buffer errors
12838 *
12839 * PARAMETERS :
12840 * @ch : Channel on which buffer error is reported from
12841 * @frame_number : frame number on which buffer error is reported on
12842 * @buffer_status : buffer error status
12843 * @userdata: userdata
12844 *
12845 * RETURN : NONE
12846 *==========================================================================*/
12847void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12848 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12849{
12850 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12851 if (hw == NULL) {
12852 LOGE("Invalid hw %p", hw);
12853 return;
12854 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012855
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012856 hw->setBufferErrorStatus(ch, frame_number, err);
12857 return;
12858}
12859
12860void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12861 uint32_t frameNumber, camera3_buffer_status_t err)
12862{
12863 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12864 pthread_mutex_lock(&mMutex);
12865
12866 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12867 if (req.frame_number != frameNumber)
12868 continue;
12869 for (auto& k : req.mPendingBufferList) {
12870 if(k.stream->priv == ch) {
12871 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12872 }
12873 }
12874 }
12875
12876 pthread_mutex_unlock(&mMutex);
12877 return;
12878}
Thierry Strudel3d639192016-09-09 11:52:26 -070012879/*===========================================================================
12880 * FUNCTION : initialize
12881 *
12882 * DESCRIPTION: Pass framework callback pointers to HAL
12883 *
12884 * PARAMETERS :
12885 *
12886 *
12887 * RETURN : Success : 0
12888 * Failure: -ENODEV
12889 *==========================================================================*/
12890
12891int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12892 const camera3_callback_ops_t *callback_ops)
12893{
12894 LOGD("E");
12895 QCamera3HardwareInterface *hw =
12896 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12897 if (!hw) {
12898 LOGE("NULL camera device");
12899 return -ENODEV;
12900 }
12901
12902 int rc = hw->initialize(callback_ops);
12903 LOGD("X");
12904 return rc;
12905}
12906
12907/*===========================================================================
12908 * FUNCTION : configure_streams
12909 *
12910 * DESCRIPTION:
12911 *
12912 * PARAMETERS :
12913 *
12914 *
12915 * RETURN : Success: 0
12916 * Failure: -EINVAL (if stream configuration is invalid)
12917 * -ENODEV (fatal error)
12918 *==========================================================================*/
12919
12920int QCamera3HardwareInterface::configure_streams(
12921 const struct camera3_device *device,
12922 camera3_stream_configuration_t *stream_list)
12923{
12924 LOGD("E");
12925 QCamera3HardwareInterface *hw =
12926 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12927 if (!hw) {
12928 LOGE("NULL camera device");
12929 return -ENODEV;
12930 }
12931 int rc = hw->configureStreams(stream_list);
12932 LOGD("X");
12933 return rc;
12934}
12935
12936/*===========================================================================
12937 * FUNCTION : construct_default_request_settings
12938 *
12939 * DESCRIPTION: Configure a settings buffer to meet the required use case
12940 *
12941 * PARAMETERS :
12942 *
12943 *
12944 * RETURN : Success: Return valid metadata
12945 * Failure: Return NULL
12946 *==========================================================================*/
12947const camera_metadata_t* QCamera3HardwareInterface::
12948 construct_default_request_settings(const struct camera3_device *device,
12949 int type)
12950{
12951
12952 LOGD("E");
12953 camera_metadata_t* fwk_metadata = NULL;
12954 QCamera3HardwareInterface *hw =
12955 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12956 if (!hw) {
12957 LOGE("NULL camera device");
12958 return NULL;
12959 }
12960
12961 fwk_metadata = hw->translateCapabilityToMetadata(type);
12962
12963 LOGD("X");
12964 return fwk_metadata;
12965}
12966
12967/*===========================================================================
12968 * FUNCTION : process_capture_request
12969 *
12970 * DESCRIPTION:
12971 *
12972 * PARAMETERS :
12973 *
12974 *
12975 * RETURN :
12976 *==========================================================================*/
12977int QCamera3HardwareInterface::process_capture_request(
12978 const struct camera3_device *device,
12979 camera3_capture_request_t *request)
12980{
12981 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012982 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012983 QCamera3HardwareInterface *hw =
12984 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12985 if (!hw) {
12986 LOGE("NULL camera device");
12987 return -EINVAL;
12988 }
12989
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012990 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012991 LOGD("X");
12992 return rc;
12993}
12994
12995/*===========================================================================
12996 * FUNCTION : dump
12997 *
12998 * DESCRIPTION:
12999 *
13000 * PARAMETERS :
13001 *
13002 *
13003 * RETURN :
13004 *==========================================================================*/
13005
13006void QCamera3HardwareInterface::dump(
13007 const struct camera3_device *device, int fd)
13008{
13009 /* Log level property is read when "adb shell dumpsys media.camera" is
13010 called so that the log level can be controlled without restarting
13011 the media server */
13012 getLogLevel();
13013
13014 LOGD("E");
13015 QCamera3HardwareInterface *hw =
13016 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13017 if (!hw) {
13018 LOGE("NULL camera device");
13019 return;
13020 }
13021
13022 hw->dump(fd);
13023 LOGD("X");
13024 return;
13025}
13026
13027/*===========================================================================
13028 * FUNCTION : flush
13029 *
13030 * DESCRIPTION: Flushes all in-flight requests and channels; only acted on
13031 *              when the HAL is in the STARTED state
13032 *
13033 * PARAMETERS :
13034 *   @device : camera3 device handle
13035 * RETURN     : 0 on success, -EINVAL/-ENODEV on failure
13036 *==========================================================================*/
13037
13038int QCamera3HardwareInterface::flush(
13039 const struct camera3_device *device)
13040{
13041 int rc;
13042 LOGD("E");
13043 QCamera3HardwareInterface *hw =
13044 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13045 if (!hw) {
13046 LOGE("NULL camera device");
13047 return -EINVAL;
13048 }
13049
13050 pthread_mutex_lock(&hw->mMutex);
13051 // Validate current state
13052 switch (hw->mState) {
13053 case STARTED:
13054 /* valid state */
13055 break;
13056
13057 case ERROR:
13058 pthread_mutex_unlock(&hw->mMutex);
13059 hw->handleCameraDeviceError();
13060 return -ENODEV;
13061
13062 default:
13063 LOGI("Flush returned during state %d", hw->mState);
13064 pthread_mutex_unlock(&hw->mMutex);
13065 return 0;
13066 }
13067 pthread_mutex_unlock(&hw->mMutex);
13068
13069 rc = hw->flush(true /* restart channels */ );
13070 LOGD("X");
13071 return rc;
13072}
13073
13074/*===========================================================================
13075 * FUNCTION : close_camera_device
13076 *
13077 * DESCRIPTION: Closes the camera device and destroys the HAL instance
13078 *
13079 * PARAMETERS :
13080 *   @device : hw_device_t handle obtained when the camera was opened
13081 *
13082 * RETURN     : NO_ERROR on success, BAD_VALUE on NULL device
13083 *==========================================================================*/
13084int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13085{
13086 int ret = NO_ERROR;
13087 QCamera3HardwareInterface *hw =
13088 reinterpret_cast<QCamera3HardwareInterface *>(
13089 reinterpret_cast<camera3_device_t *>(device)->priv);
13090 if (!hw) {
13091 LOGE("NULL camera device");
13092 return BAD_VALUE;
13093 }
13094
13095 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13096 delete hw;
13097 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013098 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013099 return ret;
13100}
13101
13102/*===========================================================================
13103 * FUNCTION : getWaveletDenoiseProcessPlate
13104 *
13105 * DESCRIPTION: query wavelet denoise process plate
13106 *
13107 * PARAMETERS : None
13108 *
13109 * RETURN     : WNR process plate value
13110 *==========================================================================*/
13111cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13112{
13113 char prop[PROPERTY_VALUE_MAX];
13114 memset(prop, 0, sizeof(prop));
13115 property_get("persist.denoise.process.plates", prop, "0");
13116 int processPlate = atoi(prop);
13117 switch(processPlate) {
13118 case 0:
13119 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13120 case 1:
13121 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13122 case 2:
13123 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13124 case 3:
13125 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13126 default:
13127 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13128 }
13129}
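
/*
 * Illustration (comment only, not compiled): the plate choice above is driven
 * entirely by system properties, so it can be switched from a shell without
 * rebuilding the HAL. A minimal sketch, assuming a build where setprop on
 * persist.* properties is permitted:
 *
 *   adb shell setprop persist.denoise.process.plates 2   # streamlined Y/CbCr
 *   adb shell setprop persist.tnr.process.plates 1       # CbCr only for TNR
 *
 * Any value outside 0-3 falls back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
 * as handled by the default case here and in getTemporalDenoiseProcessPlate().
 */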
13130
13131
13132/*===========================================================================
13133 * FUNCTION : getTemporalDenoiseProcessPlate
13134 *
13135 * DESCRIPTION: query temporal denoise process plate
13136 *
13137 * PARAMETERS : None
13138 *
13139 * RETURN     : TNR process plate value
13140 *==========================================================================*/
13141cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13142{
13143 char prop[PROPERTY_VALUE_MAX];
13144 memset(prop, 0, sizeof(prop));
13145 property_get("persist.tnr.process.plates", prop, "0");
13146 int processPlate = atoi(prop);
13147 switch(processPlate) {
13148 case 0:
13149 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13150 case 1:
13151 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13152 case 2:
13153 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13154 case 3:
13155 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13156 default:
13157 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13158 }
13159}
13160
13161
13162/*===========================================================================
13163 * FUNCTION : extractSceneMode
13164 *
13165 * DESCRIPTION: Extract scene mode from frameworks set metadata
13166 *
13167 * PARAMETERS :
13168 * @frame_settings: CameraMetadata reference
13169 *   @metaMode: ANDROID_CONTROL_MODE value set by the framework
13170 * @hal_metadata: hal metadata structure
13171 *
13172 * RETURN     : NO_ERROR on success, error code on failure
13173 *==========================================================================*/
13174int32_t QCamera3HardwareInterface::extractSceneMode(
13175 const CameraMetadata &frame_settings, uint8_t metaMode,
13176 metadata_buffer_t *hal_metadata)
13177{
13178 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013179 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13180
13181 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13182 LOGD("Ignoring control mode OFF_KEEP_STATE");
13183 return NO_ERROR;
13184 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013185
13186 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13187 camera_metadata_ro_entry entry =
13188 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13189 if (0 == entry.count)
13190 return rc;
13191
13192 uint8_t fwk_sceneMode = entry.data.u8[0];
13193
13194 int val = lookupHalName(SCENE_MODES_MAP,
13195 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13196 fwk_sceneMode);
13197 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013198 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013199 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013200 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013201 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013202
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013203 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13204 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13205 }
13206
13207 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13208        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013209 cam_hdr_param_t hdr_params;
13210 hdr_params.hdr_enable = 1;
13211 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13212 hdr_params.hdr_need_1x = false;
13213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13214 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13215 rc = BAD_VALUE;
13216 }
13217 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013218
Thierry Strudel3d639192016-09-09 11:52:26 -070013219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13220 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13221 rc = BAD_VALUE;
13222 }
13223 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013224
13225 if (mForceHdrSnapshot) {
13226 cam_hdr_param_t hdr_params;
13227 hdr_params.hdr_enable = 1;
13228 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13229 hdr_params.hdr_need_1x = false;
13230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13231 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13232 rc = BAD_VALUE;
13233 }
13234 }
13235
Thierry Strudel3d639192016-09-09 11:52:26 -070013236 return rc;
13237}
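
/*
 * Illustration (comment only, not compiled): extractSceneMode() only acts when
 * the framework sets ANDROID_CONTROL_MODE to USE_SCENE_MODE. A sketch of how a
 * client would request it through the NDK camera2 API (the request handle is a
 * placeholder; the tag names are the standard NDK ones):
 *
 *   uint8_t mode  = ACAMERA_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ACAMERA_CONTROL_SCENE_MODE_HDR;
 *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_MODE, 1, &mode);
 *   ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_SCENE_MODE, 1, &scene);
 *
 * The HAL maps the scene mode through SCENE_MODES_MAP and, for HDR, programs
 * multi-frame bracketing via CAM_INTF_PARM_HAL_BRACKETING_HDR as shown above.
 */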
13238
13239/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013240 * FUNCTION : setVideoHdrMode
13241 *
13242 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13243 *
13244 * PARAMETERS :
13245 * @hal_metadata: hal metadata structure
13246 *   @vhdr: video HDR mode (cam_video_hdr_mode_t) requested via
13247 *          QCAMERA3_VIDEO_HDR_MODE
13248 * RETURN     : NO_ERROR on success, BAD_VALUE on invalid mode
13249 *==========================================================================*/
13250int32_t QCamera3HardwareInterface::setVideoHdrMode(
13251 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13252{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013253 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13254 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13255 }
13256
13257 LOGE("Invalid Video HDR mode %d!", vhdr);
13258 return BAD_VALUE;
13259}
13260
13261/*===========================================================================
13262 * FUNCTION : setSensorHDR
13263 *
13264 * DESCRIPTION: Enable/disable sensor HDR.
13265 *
13266 * PARAMETERS :
13267 * @hal_metadata: hal metadata structure
13268 * @enable: boolean whether to enable/disable sensor HDR
13269 *   @isVideoHdrEnable: true when invoked from the video HDR path
13270 * RETURN     : NO_ERROR on success, BAD_VALUE on failure
13271 *==========================================================================*/
13272int32_t QCamera3HardwareInterface::setSensorHDR(
13273 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13274{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013275 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013276 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13277
13278 if (enable) {
13279 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13280 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13281 #ifdef _LE_CAMERA_
13282 //Default to staggered HDR for IOT
13283 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13284 #else
13285 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13286 #endif
13287 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13288 }
13289
13290 bool isSupported = false;
13291 switch (sensor_hdr) {
13292 case CAM_SENSOR_HDR_IN_SENSOR:
13293 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13294 CAM_QCOM_FEATURE_SENSOR_HDR) {
13295 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013296 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013297 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013298 break;
13299 case CAM_SENSOR_HDR_ZIGZAG:
13300 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13301 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13302 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013303 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013304 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013305 break;
13306 case CAM_SENSOR_HDR_STAGGERED:
13307 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13308 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13309 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013310 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013311 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013312 break;
13313 case CAM_SENSOR_HDR_OFF:
13314 isSupported = true;
13315 LOGD("Turning off sensor HDR");
13316 break;
13317 default:
13318 LOGE("HDR mode %d not supported", sensor_hdr);
13319 rc = BAD_VALUE;
13320 break;
13321 }
13322
13323 if(isSupported) {
13324 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13325 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13326 rc = BAD_VALUE;
13327 } else {
13328 if(!isVideoHdrEnable)
13329 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013330 }
13331 }
13332 return rc;
13333}
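
/*
 * Illustration (comment only; the numeric mapping is an assumption based on
 * the cam_sensor_hdr_type_t ordering used above, where 3 is the staggered HDR
 * default for IOT builds): the HDR flavour applied when sensor HDR is enabled
 * comes from a property, e.g.
 *
 *   adb shell setprop persist.camera.sensor.hdr 1   # in-sensor HDR
 *   adb shell setprop persist.camera.sensor.hdr 2   # zigzag HDR
 *   adb shell setprop persist.camera.sensor.hdr 3   # staggered HDR
 *
 * A mode is only programmed if the matching bit is present in
 * qcom_supported_feature_mask; otherwise the parameter is not sent.
 */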
13334
13335/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013336 * FUNCTION : needRotationReprocess
13337 *
13338 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13339 *
13340 * PARAMETERS : none
13341 *
13342 * RETURN : true: needed
13343 * false: no need
13344 *==========================================================================*/
13345bool QCamera3HardwareInterface::needRotationReprocess()
13346{
13347 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13348 // current rotation is not zero, and pp has the capability to process rotation
13349 LOGH("need do reprocess for rotation");
13350 return true;
13351 }
13352
13353 return false;
13354}
13355
13356/*===========================================================================
13357 * FUNCTION : needReprocess
13358 *
13359 * DESCRIPTION: check whether reprocess is needed
13360 *
13361 * PARAMETERS : none
13362 *
13363 * RETURN : true: needed
13364 * false: no need
13365 *==========================================================================*/
13366bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13367{
13368 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13369 // TODO: add for ZSL HDR later
13370 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13371 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13372 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13373 return true;
13374 } else {
13375 LOGH("already post processed frame");
13376 return false;
13377 }
13378 }
13379 return needRotationReprocess();
13380}
13381
13382/*===========================================================================
13383 * FUNCTION : needJpegExifRotation
13384 *
13385 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13386 *
13387 * PARAMETERS : none
13388 *
13389 * RETURN : true: needed
13390 * false: no need
13391 *==========================================================================*/
13392bool QCamera3HardwareInterface::needJpegExifRotation()
13393{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013394 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013395 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13396        LOGD("Need to use JPEG EXIF rotation");
13397 return true;
13398 }
13399 return false;
13400}
13401
13402/*===========================================================================
13403 * FUNCTION : addOfflineReprocChannel
13404 *
13405 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13406 * coming from input channel
13407 *
13408 * PARAMETERS :
13409 * @config : reprocess configuration
13410 * @inputChHandle : pointer to the input (source) channel
13411 *
13412 *
13413 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13414 *==========================================================================*/
13415QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13416 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13417{
13418 int32_t rc = NO_ERROR;
13419 QCamera3ReprocessChannel *pChannel = NULL;
13420
13421 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013422 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13423 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013424 if (NULL == pChannel) {
13425 LOGE("no mem for reprocess channel");
13426 return NULL;
13427 }
13428
13429 rc = pChannel->initialize(IS_TYPE_NONE);
13430 if (rc != NO_ERROR) {
13431 LOGE("init reprocess channel failed, ret = %d", rc);
13432 delete pChannel;
13433 return NULL;
13434 }
13435
13436 // pp feature config
13437 cam_pp_feature_config_t pp_config;
13438 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13439
13440 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13441 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13442 & CAM_QCOM_FEATURE_DSDN) {
13443            // Use CPP CDS in case h/w supports it.
13444 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13445 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13446 }
13447 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13448 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13449 }
13450
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013451 if (config.hdr_param.hdr_enable) {
13452 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13453 pp_config.hdr_param = config.hdr_param;
13454 }
13455
13456 if (mForceHdrSnapshot) {
13457 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13458 pp_config.hdr_param.hdr_enable = 1;
13459 pp_config.hdr_param.hdr_need_1x = 0;
13460 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13461 }
13462
Thierry Strudel3d639192016-09-09 11:52:26 -070013463 rc = pChannel->addReprocStreamsFromSource(pp_config,
13464 config,
13465 IS_TYPE_NONE,
13466 mMetadataChannel);
13467
13468 if (rc != NO_ERROR) {
13469 delete pChannel;
13470 return NULL;
13471 }
13472 return pChannel;
13473}
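
/*
 * Usage sketch (comment only; cfg contents and picChannel are placeholders for
 * what the capture path fills in from the offline input stream and request):
 *
 *   reprocess_config_t cfg;
 *   memset(&cfg, 0, sizeof(cfg));
 *   QCamera3ReprocessChannel *ch = addOfflineReprocChannel(cfg, picChannel);
 *   if (ch == NULL) {
 *       LOGE("offline reprocess channel creation failed");
 *   }
 *
 * On success the channel has already been initialize()d and its reprocess
 * streams added from the source channel; the caller starts it and queues
 * frames for postprocessing.
 */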
13474
13475/*===========================================================================
13476 * FUNCTION : getMobicatMask
13477 *
13478 * DESCRIPTION: returns mobicat mask
13479 *
13480 * PARAMETERS : none
13481 *
13482 * RETURN : mobicat mask
13483 *
13484 *==========================================================================*/
13485uint8_t QCamera3HardwareInterface::getMobicatMask()
13486{
13487 return m_MobicatMask;
13488}
13489
13490/*===========================================================================
13491 * FUNCTION : setMobicat
13492 *
13493 * DESCRIPTION: set Mobicat on/off.
13494 *
13495 * PARAMETERS :
13496 * @params : none
13497 *
13498 * RETURN : int32_t type of status
13499 * NO_ERROR -- success
13500 * none-zero failure code
13501 *==========================================================================*/
13502int32_t QCamera3HardwareInterface::setMobicat()
13503{
13504 char value [PROPERTY_VALUE_MAX];
13505 property_get("persist.camera.mobicat", value, "0");
13506 int32_t ret = NO_ERROR;
13507 uint8_t enableMobi = (uint8_t)atoi(value);
13508
13509 if (enableMobi) {
13510 tune_cmd_t tune_cmd;
13511 tune_cmd.type = SET_RELOAD_CHROMATIX;
13512 tune_cmd.module = MODULE_ALL;
13513 tune_cmd.value = TRUE;
13514 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13515 CAM_INTF_PARM_SET_VFE_COMMAND,
13516 tune_cmd);
13517
13518 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13519 CAM_INTF_PARM_SET_PP_COMMAND,
13520 tune_cmd);
13521 }
13522 m_MobicatMask = enableMobi;
13523
13524 return ret;
13525}
13526
13527/*===========================================================================
13528* FUNCTION : getLogLevel
13529*
13530* DESCRIPTION: Reads the log level property into a variable
13531*
13532* PARAMETERS :
13533* None
13534*
13535* RETURN :
13536* None
13537*==========================================================================*/
13538void QCamera3HardwareInterface::getLogLevel()
13539{
13540 char prop[PROPERTY_VALUE_MAX];
13541 uint32_t globalLogLevel = 0;
13542
13543 property_get("persist.camera.hal.debug", prop, "0");
13544 int val = atoi(prop);
13545 if (0 <= val) {
13546 gCamHal3LogLevel = (uint32_t)val;
13547 }
13548
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013549 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013550 gKpiDebugLevel = atoi(prop);
13551
13552 property_get("persist.camera.global.debug", prop, "0");
13553 val = atoi(prop);
13554 if (0 <= val) {
13555 globalLogLevel = (uint32_t)val;
13556 }
13557
13558 /* Highest log level among hal.logs and global.logs is selected */
13559 if (gCamHal3LogLevel < globalLogLevel)
13560 gCamHal3LogLevel = globalLogLevel;
13561
13562 return;
13563}
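
/*
 * Illustration (comment only): the effective HAL log level is the maximum of
 * the HAL-specific and global properties read above, e.g.
 *
 *   adb shell setprop persist.camera.hal.debug 3
 *   adb shell setprop persist.camera.global.debug 1
 *   adb shell dumpsys media.camera      # triggers dump() -> getLogLevel()
 *
 * leaves gCamHal3LogLevel at 3. persist.camera.kpi.debug is read separately
 * into gKpiDebugLevel.
 */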
13564
13565/*===========================================================================
13566 * FUNCTION : validateStreamRotations
13567 *
13568 * DESCRIPTION: Check if the rotations requested are supported
13569 *
13570 * PARAMETERS :
13571 * @stream_list : streams to be configured
13572 *
13573 * RETURN : NO_ERROR on success
13574 * -EINVAL on failure
13575 *
13576 *==========================================================================*/
13577int QCamera3HardwareInterface::validateStreamRotations(
13578 camera3_stream_configuration_t *streamList)
13579{
13580 int rc = NO_ERROR;
13581
13582 /*
13583 * Loop through all streams requested in configuration
13584 * Check if unsupported rotations have been requested on any of them
13585 */
13586 for (size_t j = 0; j < streamList->num_streams; j++){
13587 camera3_stream_t *newStream = streamList->streams[j];
13588
13589 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13590 bool isImplDef = (newStream->format ==
13591 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13592 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13593 isImplDef);
13594
13595 if (isRotated && (!isImplDef || isZsl)) {
13596 LOGE("Error: Unsupported rotation of %d requested for stream"
13597                    " type:%d and stream format:%d",
13598 newStream->rotation, newStream->stream_type,
13599 newStream->format);
13600 rc = -EINVAL;
13601 break;
13602 }
13603 }
13604
13605 return rc;
13606}
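
/*
 * Example of the rule enforced above (comment only): in a configuration such as
 *
 *   stream[0]: IMPLEMENTATION_DEFINED output,               rotation 90 -> accepted
 *   stream[1]: BLOB (JPEG) output,                          rotation 90 -> rejected
 *   stream[2]: IMPLEMENTATION_DEFINED ZSL (bidirectional),  rotation 90 -> rejected
 *
 * the whole call fails with -EINVAL, because non-zero rotation is accepted
 * only on implementation-defined, non-ZSL streams.
 */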
13607
13608/*===========================================================================
13609* FUNCTION : getFlashInfo
13610*
13611* DESCRIPTION: Retrieve information about whether the device has a flash.
13612*
13613* PARAMETERS :
13614* @cameraId : Camera id to query
13615* @hasFlash : Boolean indicating whether there is a flash device
13616* associated with given camera
13617* @flashNode : If a flash device exists, this will be its device node.
13618*
13619* RETURN :
13620* None
13621*==========================================================================*/
13622void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13623 bool& hasFlash,
13624 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13625{
13626 cam_capability_t* camCapability = gCamCapability[cameraId];
13627 if (NULL == camCapability) {
13628 hasFlash = false;
13629 flashNode[0] = '\0';
13630 } else {
13631 hasFlash = camCapability->flash_available;
13632 strlcpy(flashNode,
13633 (char*)camCapability->flash_dev_name,
13634 QCAMERA_MAX_FILEPATH_LENGTH);
13635 }
13636}
13637
13638/*===========================================================================
13639* FUNCTION : getEepromVersionInfo
13640*
13641* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13642*
13643* PARAMETERS : None
13644*
13645* RETURN : string describing EEPROM version
13646* "\0" if no such info available
13647*==========================================================================*/
13648const char *QCamera3HardwareInterface::getEepromVersionInfo()
13649{
13650 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13651}
13652
13653/*===========================================================================
13654* FUNCTION : getLdafCalib
13655*
13656* DESCRIPTION: Retrieve Laser AF calibration data
13657*
13658* PARAMETERS : None
13659*
13660* RETURN : Two uint32_t describing laser AF calibration data
13661* NULL if none is available.
13662*==========================================================================*/
13663const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13664{
13665 if (mLdafCalibExist) {
13666 return &mLdafCalib[0];
13667 } else {
13668 return NULL;
13669 }
13670}
13671
13672/*===========================================================================
13673 * FUNCTION : dynamicUpdateMetaStreamInfo
13674 *
13675 * DESCRIPTION: This function:
13676 * (1) stops all the channels
13677 * (2) returns error on pending requests and buffers
13678 * (3) sends metastream_info in setparams
13679 * (4) starts all channels
13680 * This is useful when sensor has to be restarted to apply any
13681 * settings such as frame rate from a different sensor mode
13682 *
13683 * PARAMETERS : None
13684 *
13685 * RETURN : NO_ERROR on success
13686 * Error codes on failure
13687 *
13688 *==========================================================================*/
13689int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13690{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013691 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013692 int rc = NO_ERROR;
13693
13694 LOGD("E");
13695
13696 rc = stopAllChannels();
13697 if (rc < 0) {
13698 LOGE("stopAllChannels failed");
13699 return rc;
13700 }
13701
13702 rc = notifyErrorForPendingRequests();
13703 if (rc < 0) {
13704 LOGE("notifyErrorForPendingRequests failed");
13705 return rc;
13706 }
13707
13708 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13709        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx"
13710                " Format:%d",
13711 mStreamConfigInfo.type[i],
13712 mStreamConfigInfo.stream_sizes[i].width,
13713 mStreamConfigInfo.stream_sizes[i].height,
13714 mStreamConfigInfo.postprocess_mask[i],
13715 mStreamConfigInfo.format[i]);
13716 }
13717
13718 /* Send meta stream info once again so that ISP can start */
13719 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13720 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13721 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13722 mParameters);
13723 if (rc < 0) {
13724 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13725 }
13726
13727 rc = startAllChannels();
13728 if (rc < 0) {
13729 LOGE("startAllChannels failed");
13730 return rc;
13731 }
13732
13733 LOGD("X");
13734 return rc;
13735}
13736
13737/*===========================================================================
13738 * FUNCTION : stopAllChannels
13739 *
13740 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13741 *
13742 * PARAMETERS : None
13743 *
13744 * RETURN : NO_ERROR on success
13745 * Error codes on failure
13746 *
13747 *==========================================================================*/
13748int32_t QCamera3HardwareInterface::stopAllChannels()
13749{
13750 int32_t rc = NO_ERROR;
13751
13752 LOGD("Stopping all channels");
13753 // Stop the Streams/Channels
13754 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13755 it != mStreamInfo.end(); it++) {
13756 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13757 if (channel) {
13758 channel->stop();
13759 }
13760 (*it)->status = INVALID;
13761 }
13762
13763 if (mSupportChannel) {
13764 mSupportChannel->stop();
13765 }
13766 if (mAnalysisChannel) {
13767 mAnalysisChannel->stop();
13768 }
13769 if (mRawDumpChannel) {
13770 mRawDumpChannel->stop();
13771 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013772 if (mHdrPlusRawSrcChannel) {
13773 mHdrPlusRawSrcChannel->stop();
13774 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013775 if (mMetadataChannel) {
13776 /* If content of mStreamInfo is not 0, there is metadata stream */
13777 mMetadataChannel->stop();
13778 }
13779
13780 LOGD("All channels stopped");
13781 return rc;
13782}
13783
13784/*===========================================================================
13785 * FUNCTION : startAllChannels
13786 *
13787 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13788 *
13789 * PARAMETERS : None
13790 *
13791 * RETURN : NO_ERROR on success
13792 * Error codes on failure
13793 *
13794 *==========================================================================*/
13795int32_t QCamera3HardwareInterface::startAllChannels()
13796{
13797 int32_t rc = NO_ERROR;
13798
13799 LOGD("Start all channels ");
13800 // Start the Streams/Channels
13801 if (mMetadataChannel) {
13802 /* If content of mStreamInfo is not 0, there is metadata stream */
13803 rc = mMetadataChannel->start();
13804 if (rc < 0) {
13805 LOGE("META channel start failed");
13806 return rc;
13807 }
13808 }
13809 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13810 it != mStreamInfo.end(); it++) {
13811 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13812 if (channel) {
13813 rc = channel->start();
13814 if (rc < 0) {
13815 LOGE("channel start failed");
13816 return rc;
13817 }
13818 }
13819 }
13820 if (mAnalysisChannel) {
13821 mAnalysisChannel->start();
13822 }
13823 if (mSupportChannel) {
13824 rc = mSupportChannel->start();
13825 if (rc < 0) {
13826 LOGE("Support channel start failed");
13827 return rc;
13828 }
13829 }
13830 if (mRawDumpChannel) {
13831 rc = mRawDumpChannel->start();
13832 if (rc < 0) {
13833 LOGE("RAW dump channel start failed");
13834 return rc;
13835 }
13836 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013837 if (mHdrPlusRawSrcChannel) {
13838 rc = mHdrPlusRawSrcChannel->start();
13839 if (rc < 0) {
13840 LOGE("HDR+ RAW channel start failed");
13841 return rc;
13842 }
13843 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013844
13845 LOGD("All channels started");
13846 return rc;
13847}
13848
13849/*===========================================================================
13850 * FUNCTION : notifyErrorForPendingRequests
13851 *
13852 * DESCRIPTION: This function sends error for all the pending requests/buffers
13853 *
13854 * PARAMETERS : None
13855 *
13856 * RETURN : Error codes
13857 * NO_ERROR on success
13858 *
13859 *==========================================================================*/
13860int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13861{
Emilian Peev7650c122017-01-19 08:24:33 -080013862 notifyErrorFoPendingDepthData(mDepthChannel);
13863
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013864 auto pendingRequest = mPendingRequestsList.begin();
13865 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070013866
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013867 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
13868 // buffers (for which buffers aren't sent yet).
13869 while (pendingRequest != mPendingRequestsList.end() ||
13870 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
13871 if (pendingRequest == mPendingRequestsList.end() ||
13872 pendingBuffer->frame_number < pendingRequest->frame_number) {
13873 // If metadata for this frame was sent, notify about a buffer error and returns buffers
13874 // with error.
13875 for (auto &info : pendingBuffer->mPendingBufferList) {
13876 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070013877 camera3_notify_msg_t notify_msg;
13878 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13879 notify_msg.type = CAMERA3_MSG_ERROR;
13880 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013881 notify_msg.message.error.error_stream = info.stream;
13882 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013883 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013884
13885 camera3_stream_buffer_t buffer = {};
13886 buffer.acquire_fence = -1;
13887 buffer.release_fence = -1;
13888 buffer.buffer = info.buffer;
13889 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13890 buffer.stream = info.stream;
13891 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070013892 }
13893
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013894 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13895 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
13896 pendingBuffer->frame_number > pendingRequest->frame_number) {
13897 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070013898 camera3_notify_msg_t notify_msg;
13899 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13900 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013901 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
13902 notify_msg.message.error.error_stream = nullptr;
13903 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013904 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013905
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013906 if (pendingRequest->input_buffer != nullptr) {
13907 camera3_capture_result result = {};
13908 result.frame_number = pendingRequest->frame_number;
13909 result.result = nullptr;
13910 result.input_buffer = pendingRequest->input_buffer;
13911 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013912 }
13913
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013914 mShutterDispatcher.clear(pendingRequest->frame_number);
13915 pendingRequest = mPendingRequestsList.erase(pendingRequest);
13916 } else {
13917 // If both buffers and result metadata weren't sent yet, notify about a request error
13918 // and return buffers with error.
13919 for (auto &info : pendingBuffer->mPendingBufferList) {
13920 camera3_notify_msg_t notify_msg;
13921 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13922 notify_msg.type = CAMERA3_MSG_ERROR;
13923 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13924 notify_msg.message.error.error_stream = info.stream;
13925 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
13926 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013927
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013928 camera3_stream_buffer_t buffer = {};
13929 buffer.acquire_fence = -1;
13930 buffer.release_fence = -1;
13931 buffer.buffer = info.buffer;
13932 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
13933 buffer.stream = info.stream;
13934 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
13935 }
13936
13937 if (pendingRequest->input_buffer != nullptr) {
13938 camera3_capture_result result = {};
13939 result.frame_number = pendingRequest->frame_number;
13940 result.result = nullptr;
13941 result.input_buffer = pendingRequest->input_buffer;
13942 orchestrateResult(&result);
13943 }
13944
13945 mShutterDispatcher.clear(pendingRequest->frame_number);
13946 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
13947 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070013948 }
13949 }
13950
13951 /* Reset pending frame Drop list and requests list */
13952 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013953 mShutterDispatcher.clear();
13954 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070013955 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013956 LOGH("Cleared all the pending buffers ");
13957
Chien-Yu Chen3f303522017-05-19 15:21:45 -070013958 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013959}
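
/*
 * Summary of the policy implemented above (restating the camera3 error
 * contract as this function applies it):
 *
 *   metadata already sent, buffers pending -> CAMERA3_MSG_ERROR_BUFFER per
 *                                             buffer, buffers returned with
 *                                             CAMERA3_BUFFER_STATUS_ERROR
 *   buffers already sent, metadata pending -> CAMERA3_MSG_ERROR_RESULT
 *   neither sent                           -> CAMERA3_MSG_ERROR_REQUEST per
 *                                             pending buffer, buffers returned
 *                                             with error status
 *
 * Any input buffer is handed back through an empty capture result, and the
 * shutter and output-buffer dispatchers are cleared for the affected frames.
 */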
13960
13961bool QCamera3HardwareInterface::isOnEncoder(
13962 const cam_dimension_t max_viewfinder_size,
13963 uint32_t width, uint32_t height)
13964{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013965 return ((width > (uint32_t)max_viewfinder_size.width) ||
13966 (height > (uint32_t)max_viewfinder_size.height) ||
13967 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13968 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013969}
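
/*
 * Example (comment only): with a max_viewfinder_size of 1920x1080, a
 * 4032x3024 snapshot stream is "on the encoder" path because it exceeds the
 * viewfinder bound; a 4096x2304 stream would qualify through the 4K clause
 * (width > 3840) even if the viewfinder bound were larger.
 */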
13970
13971/*===========================================================================
13972 * FUNCTION : setBundleInfo
13973 *
13974 * DESCRIPTION: Set bundle info for all streams that are bundle.
13975 *
13976 * PARAMETERS : None
13977 *
13978 * RETURN : NO_ERROR on success
13979 * Error codes on failure
13980 *==========================================================================*/
13981int32_t QCamera3HardwareInterface::setBundleInfo()
13982{
13983 int32_t rc = NO_ERROR;
13984
13985 if (mChannelHandle) {
13986 cam_bundle_config_t bundleInfo;
13987 memset(&bundleInfo, 0, sizeof(bundleInfo));
13988 rc = mCameraHandle->ops->get_bundle_info(
13989 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13990 if (rc != NO_ERROR) {
13991 LOGE("get_bundle_info failed");
13992 return rc;
13993 }
13994 if (mAnalysisChannel) {
13995 mAnalysisChannel->setBundleInfo(bundleInfo);
13996 }
13997 if (mSupportChannel) {
13998 mSupportChannel->setBundleInfo(bundleInfo);
13999 }
14000 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14001 it != mStreamInfo.end(); it++) {
14002 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14003 channel->setBundleInfo(bundleInfo);
14004 }
14005 if (mRawDumpChannel) {
14006 mRawDumpChannel->setBundleInfo(bundleInfo);
14007 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014008 if (mHdrPlusRawSrcChannel) {
14009 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14010 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014011 }
14012
14013 return rc;
14014}
14015
14016/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014017 * FUNCTION : setInstantAEC
14018 *
14019 * DESCRIPTION: Set Instant AEC related params.
14020 *
14021 * PARAMETERS :
14022 * @meta: CameraMetadata reference
14023 *
14024 * RETURN : NO_ERROR on success
14025 * Error codes on failure
14026 *==========================================================================*/
14027int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14028{
14029 int32_t rc = NO_ERROR;
14030 uint8_t val = 0;
14031 char prop[PROPERTY_VALUE_MAX];
14032
14033 // First try to configure instant AEC from framework metadata
14034 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14035 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14036 }
14037
14038 // If framework did not set this value, try to read from set prop.
14039 if (val == 0) {
14040 memset(prop, 0, sizeof(prop));
14041 property_get("persist.camera.instant.aec", prop, "0");
14042 val = (uint8_t)atoi(prop);
14043 }
14044
14045 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14046 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14047 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14048 mInstantAEC = val;
14049 mInstantAECSettledFrameNumber = 0;
14050 mInstantAecFrameIdxCount = 0;
14051 LOGH("instantAEC value set %d",val);
14052 if (mInstantAEC) {
14053 memset(prop, 0, sizeof(prop));
14054 property_get("persist.camera.ae.instant.bound", prop, "10");
14055 int32_t aec_frame_skip_cnt = atoi(prop);
14056 if (aec_frame_skip_cnt >= 0) {
14057 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14058 } else {
14059 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14060 rc = BAD_VALUE;
14061 }
14062 }
14063 } else {
14064 LOGE("Bad instant aec value set %d", val);
14065 rc = BAD_VALUE;
14066 }
14067 return rc;
14068}
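
/*
 * Illustration (comment only): instant AEC can be requested either through
 * the QCAMERA3_INSTANT_AEC_MODE vendor tag in the capture request or, if the
 * framework does not set it, through properties, e.g.
 *
 *   adb shell setprop persist.camera.instant.aec 1        # enable instant AEC
 *   adb shell setprop persist.camera.ae.instant.bound 8   # skip up to 8 frames
 *
 * Values must lie in the cam_aec_convergence_type range checked above;
 * anything else is rejected with BAD_VALUE.
 */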
14069
14070/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014071 * FUNCTION : get_num_overall_buffers
14072 *
14073 * DESCRIPTION: Estimate number of pending buffers across all requests.
14074 *
14075 * PARAMETERS : None
14076 *
14077 * RETURN : Number of overall pending buffers
14078 *
14079 *==========================================================================*/
14080uint32_t PendingBuffersMap::get_num_overall_buffers()
14081{
14082 uint32_t sum_buffers = 0;
14083 for (auto &req : mPendingBuffersInRequest) {
14084 sum_buffers += req.mPendingBufferList.size();
14085 }
14086 return sum_buffers;
14087}
14088
14089/*===========================================================================
14090 * FUNCTION : removeBuf
14091 *
14092 * DESCRIPTION: Remove a matching buffer from tracker.
14093 *
14094 * PARAMETERS : @buffer: image buffer for the callback
14095 *
14096 * RETURN : None
14097 *
14098 *==========================================================================*/
14099void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14100{
14101 bool buffer_found = false;
14102 for (auto req = mPendingBuffersInRequest.begin();
14103 req != mPendingBuffersInRequest.end(); req++) {
14104 for (auto k = req->mPendingBufferList.begin();
14105 k != req->mPendingBufferList.end(); k++ ) {
14106 if (k->buffer == buffer) {
14107 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14108 req->frame_number, buffer);
14109 k = req->mPendingBufferList.erase(k);
14110 if (req->mPendingBufferList.empty()) {
14111 // Remove this request from Map
14112 req = mPendingBuffersInRequest.erase(req);
14113 }
14114 buffer_found = true;
14115 break;
14116 }
14117 }
14118 if (buffer_found) {
14119 break;
14120 }
14121 }
14122 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14123 get_num_overall_buffers());
14124}
14125
14126/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014127 * FUNCTION : getBufErrStatus
14128 *
14129 * DESCRIPTION: get buffer error status
14130 *
14131 * PARAMETERS : @buffer: buffer handle
14132 *
14133 * RETURN : Error status
14134 *
14135 *==========================================================================*/
14136int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14137{
14138 for (auto& req : mPendingBuffersInRequest) {
14139 for (auto& k : req.mPendingBufferList) {
14140 if (k.buffer == buffer)
14141 return k.bufStatus;
14142 }
14143 }
14144 return CAMERA3_BUFFER_STATUS_OK;
14145}
14146
14147/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014148 * FUNCTION : setPAAFSupport
14149 *
14150 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14151 * feature mask according to stream type and filter
14152 * arrangement
14153 *
14154 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14155 * @stream_type: stream type
14156 * @filter_arrangement: filter arrangement
14157 *
14158 * RETURN : None
14159 *==========================================================================*/
14160void QCamera3HardwareInterface::setPAAFSupport(
14161 cam_feature_mask_t& feature_mask,
14162 cam_stream_type_t stream_type,
14163 cam_color_filter_arrangement_t filter_arrangement)
14164{
Thierry Strudel3d639192016-09-09 11:52:26 -070014165 switch (filter_arrangement) {
14166 case CAM_FILTER_ARRANGEMENT_RGGB:
14167 case CAM_FILTER_ARRANGEMENT_GRBG:
14168 case CAM_FILTER_ARRANGEMENT_GBRG:
14169 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014170 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14171 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014172 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014173 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14174 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014175 }
14176 break;
14177 case CAM_FILTER_ARRANGEMENT_Y:
14178 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14179 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14180 }
14181 break;
14182 default:
14183 break;
14184 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014185 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14186 feature_mask, stream_type, filter_arrangement);
14187
14188
Thierry Strudel3d639192016-09-09 11:52:26 -070014189}
14190
14191/*===========================================================================
14192* FUNCTION : getSensorMountAngle
14193*
14194* DESCRIPTION: Retrieve sensor mount angle
14195*
14196* PARAMETERS : None
14197*
14198* RETURN : sensor mount angle in uint32_t
14199*==========================================================================*/
14200uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14201{
14202 return gCamCapability[mCameraId]->sensor_mount_angle;
14203}
14204
14205/*===========================================================================
14206* FUNCTION : getRelatedCalibrationData
14207*
14208* DESCRIPTION: Retrieve related system calibration data
14209*
14210* PARAMETERS : None
14211*
14212* RETURN : Pointer of related system calibration data
14213*==========================================================================*/
14214const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14215{
14216 return (const cam_related_system_calibration_data_t *)
14217 &(gCamCapability[mCameraId]->related_cam_calibration);
14218}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014219
14220/*===========================================================================
14221 * FUNCTION : is60HzZone
14222 *
14223 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14224 *
14225 * PARAMETERS : None
14226 *
14227 * RETURN : True if in 60Hz zone, False otherwise
14228 *==========================================================================*/
14229bool QCamera3HardwareInterface::is60HzZone()
14230{
14231 time_t t = time(NULL);
14232 struct tm lt;
14233
14234 struct tm* r = localtime_r(&t, &lt);
14235
14236 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14237 return true;
14238 else
14239 return false;
14240}
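
/*
 * Worked example (comment only): tm_gmtoff is the UTC offset in seconds, so
 * offsets in the open interval (-2h, +8h) are treated as 50Hz regions and
 * everything else as 60Hz:
 *
 *   New York (UTC-5): tm_gmtoff = -18000 <= -7200  -> 60Hz
 *   Berlin   (UTC+1): tm_gmtoff =   3600           -> 50Hz
 *   Tokyo    (UTC+9): tm_gmtoff =  32400 >= 28800  -> 60Hz
 *
 * A NULL localtime_r() result also defaults to 60Hz.
 */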
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014241
14242/*===========================================================================
14243 * FUNCTION : adjustBlackLevelForCFA
14244 *
14245 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14246 * of bayer CFA (Color Filter Array).
14247 *
14248 * PARAMETERS : @input: black level pattern in the order of RGGB
14249 * @output: black level pattern in the order of CFA
14250 * @color_arrangement: CFA color arrangement
14251 *
14252 * RETURN : None
14253 *==========================================================================*/
14254template<typename T>
14255void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14256 T input[BLACK_LEVEL_PATTERN_CNT],
14257 T output[BLACK_LEVEL_PATTERN_CNT],
14258 cam_color_filter_arrangement_t color_arrangement)
14259{
14260 switch (color_arrangement) {
14261 case CAM_FILTER_ARRANGEMENT_GRBG:
14262 output[0] = input[1];
14263 output[1] = input[0];
14264 output[2] = input[3];
14265 output[3] = input[2];
14266 break;
14267 case CAM_FILTER_ARRANGEMENT_GBRG:
14268 output[0] = input[2];
14269 output[1] = input[3];
14270 output[2] = input[0];
14271 output[3] = input[1];
14272 break;
14273 case CAM_FILTER_ARRANGEMENT_BGGR:
14274 output[0] = input[3];
14275 output[1] = input[2];
14276 output[2] = input[1];
14277 output[3] = input[0];
14278 break;
14279 case CAM_FILTER_ARRANGEMENT_RGGB:
14280 output[0] = input[0];
14281 output[1] = input[1];
14282 output[2] = input[2];
14283 output[3] = input[3];
14284 break;
14285 default:
14286 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14287 break;
14288 }
14289}
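
/*
 * Worked example (follows directly from the mapping above): with an RGGB
 * black level pattern input = {R=64, Gr=65, Gb=66, B=67} and a GBRG CFA,
 *
 *   output = { input[2], input[3], input[0], input[1] } = { 66, 67, 64, 65 }
 *
 * i.e. output[0] always holds the black level of the colour at the top-left
 * of the sensor's CFA readout order.
 */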
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014290
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014291void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14292 CameraMetadata &resultMetadata,
14293 std::shared_ptr<metadata_buffer_t> settings)
14294{
14295 if (settings == nullptr) {
14296 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14297 return;
14298 }
14299
14300 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14301 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14302 }
14303
14304 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14305 String8 str((const char *)gps_methods);
14306 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14307 }
14308
14309 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14310 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14311 }
14312
14313 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14314 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14315 }
14316
14317 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14318 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14319 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14320 }
14321
14322 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14323 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14324 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14325 }
14326
14327 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14328 int32_t fwk_thumb_size[2];
14329 fwk_thumb_size[0] = thumb_size->width;
14330 fwk_thumb_size[1] = thumb_size->height;
14331 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14332 }
14333
14334 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14335 uint8_t fwk_intent = intent[0];
14336 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14337 }
14338}
14339
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014340bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14341 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14342 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014343{
14344 if (hdrPlusRequest == nullptr) return false;
14345
14346 // Check noise reduction mode is high quality.
14347 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14348 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14349 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014350 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14351 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014352 return false;
14353 }
14354
14355 // Check edge mode is high quality.
14356 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14357 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14358 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14359 return false;
14360 }
14361
14362 if (request.num_output_buffers != 1 ||
14363 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14364 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014365 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14366 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14367 request.output_buffers[0].stream->width,
14368 request.output_buffers[0].stream->height,
14369 request.output_buffers[0].stream->format);
14370 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014371 return false;
14372 }
14373
14374 // Get a YUV buffer from pic channel.
14375 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14376 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14377 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14378 if (res != OK) {
14379 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14380 __FUNCTION__, strerror(-res), res);
14381 return false;
14382 }
14383
14384 pbcamera::StreamBuffer buffer;
14385 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014386 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014387 buffer.data = yuvBuffer->buffer;
14388 buffer.dataSize = yuvBuffer->frame_len;
14389
14390 pbcamera::CaptureRequest pbRequest;
14391 pbRequest.id = request.frame_number;
14392 pbRequest.outputBuffers.push_back(buffer);
14393
14394 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014395 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014396 if (res != OK) {
14397 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14398 strerror(-res), res);
14399 return false;
14400 }
14401
14402 hdrPlusRequest->yuvBuffer = yuvBuffer;
14403 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14404
14405 return true;
14406}
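
/*
 * Summary of the gating above (comment only): a request is handed to the HDR+
 * service only when all of the following hold, otherwise it stays on the
 * regular capture path:
 *
 *   - ANDROID_NOISE_REDUCTION_MODE == HIGH_QUALITY
 *   - ANDROID_EDGE_MODE == HIGH_QUALITY
 *   - exactly one output buffer, and it targets a BLOB (JPEG) stream
 *   - a YUV buffer could be obtained from the pic channel for this frame
 *   - gHdrPlusClient->submitCaptureRequest() succeeded
 */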
14407
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014408status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14409{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014410 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14411 return OK;
14412 }
14413
14414 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14415 if (res != OK) {
14416 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14417 strerror(-res), res);
14418 return res;
14419 }
14420 gHdrPlusClientOpening = true;
14421
14422 return OK;
14423}
14424
Chien-Yu Chenee335912017-02-09 17:53:20 -080014425status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14426{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014427 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014428
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014429 if (mHdrPlusModeEnabled) {
14430 return OK;
14431 }
14432
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014433 // Check if gHdrPlusClient is opened or being opened.
14434 if (gHdrPlusClient == nullptr) {
14435 if (gHdrPlusClientOpening) {
14436 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14437 return OK;
14438 }
14439
14440 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014441 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014442 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14443 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014444 return res;
14445 }
14446
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014447 // When opening HDR+ client completes, HDR+ mode will be enabled.
14448 return OK;
14449
Chien-Yu Chenee335912017-02-09 17:53:20 -080014450 }
14451
14452 // Configure stream for HDR+.
14453 res = configureHdrPlusStreamsLocked();
14454 if (res != OK) {
14455 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014456 return res;
14457 }
14458
14459 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14460 res = gHdrPlusClient->setZslHdrPlusMode(true);
14461 if (res != OK) {
14462 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014463 return res;
14464 }
14465
14466 mHdrPlusModeEnabled = true;
14467 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14468
14469 return OK;
14470}
14471
14472void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14473{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014474 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014475 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014476 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14477 if (res != OK) {
14478 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14479 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014480
14481 // Close HDR+ client so Easel can enter low power mode.
14482 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14483 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014484 }
14485
14486 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014487 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014488 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14489}
14490
14491status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014492{
14493 pbcamera::InputConfiguration inputConfig;
14494 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14495 status_t res = OK;
14496
14497 // Configure HDR+ client streams.
14498 // Get input config.
14499 if (mHdrPlusRawSrcChannel) {
14500 // HDR+ input buffers will be provided by HAL.
14501 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14502 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14503 if (res != OK) {
14504 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14505 __FUNCTION__, strerror(-res), res);
14506 return res;
14507 }
14508
14509 inputConfig.isSensorInput = false;
14510 } else {
14511 // Sensor MIPI will send data to Easel.
14512 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014513 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014514 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14515 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14516 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14517 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14518 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070014519 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014520 if (mSensorModeInfo.num_raw_bits != 10) {
14521 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14522 mSensorModeInfo.num_raw_bits);
14523 return BAD_VALUE;
14524 }
14525
14526 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014527 }
14528
14529 // Get output configurations.
14530 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014531 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014532
14533 // Easel may need to output YUV output buffers if mPictureChannel was created.
14534 pbcamera::StreamConfiguration yuvOutputConfig;
14535 if (mPictureChannel != nullptr) {
14536 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14537 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14538 if (res != OK) {
14539            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14540 __FUNCTION__, strerror(-res), res);
14541
14542 return res;
14543 }
14544
14545 outputStreamConfigs.push_back(yuvOutputConfig);
14546 }
14547
14548 // TODO: consider other channels for YUV output buffers.
14549
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014550 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014551 if (res != OK) {
14552 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14553 strerror(-res), res);
14554 return res;
14555 }
14556
14557 return OK;
14558}
14559
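// Callback invoked when the asynchronous HDR+ client open requested in
// openHdrPlusClientAsyncLocked() completes. Takes ownership of the client, pushes the camera's
// static metadata to it, and enables HDR+ mode. If HDR+ was disabled while the open was in
// flight (gHdrPlusClientOpening is false), the callback returns without adopting the client.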
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014560void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14561{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014562 if (client == nullptr) {
14563 ALOGE("%s: Opened client is null.", __FUNCTION__);
14564 return;
14565 }
14566
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014567 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014568 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14569
14570 Mutex::Autolock l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014571 if (!gHdrPlusClientOpening) {
14572        ALOGW("%s: HDR+ mode was disabled while the HDR+ client was being opened.", __FUNCTION__);
14573 return;
14574 }
14575
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014576 gHdrPlusClient = std::move(client);
14577 gHdrPlusClientOpening = false;
14578
14579 // Set static metadata.
14580 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14581 if (res != OK) {
14582 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14583 __FUNCTION__, strerror(-res), res);
14584 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14585 gHdrPlusClient = nullptr;
14586 return;
14587 }
14588
14589 // Enable HDR+ mode.
14590 res = enableHdrPlusModeLocked();
14591 if (res != OK) {
14592 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14593 }
14594}
14595
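// Callback for a failed asynchronous HDR+ client open: log the error and clear
// gHdrPlusClientOpening so a later enable attempt can retry.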
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014596void QCamera3HardwareInterface::onOpenFailed(status_t err)
14597{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014598 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14599 Mutex::Autolock l(gHdrPlusClientLock);
14600 gHdrPlusClientOpening = false;
14601}
14602
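// Callback for an unrecoverable HDR+/Easel error: move the HAL to the ERROR state and notify
// the framework through handleCameraDeviceError().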
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014603void QCamera3HardwareInterface::onFatalError()
14604{
14605 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14606
14607 // Set HAL state to error.
14608 pthread_mutex_lock(&mMutex);
14609 mState = ERROR;
14610 pthread_mutex_unlock(&mMutex);
14611
14612 handleCameraDeviceError();
14613}
14614
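// Callback delivering a successful HDR+ capture result. The single YUV output buffer is matched
// to its pending HDR+ request, the request's settings are merged into the ZSL result metadata,
// the buffer is handed back to the pic channel for JPEG encoding, the shutter is dispatched
// using the sensor timestamp from the result metadata, and the pending request is removed.
// Setting persist.camera.hdrplus.dump_yuv to a non-zero value additionally dumps the YUV output
// to a .ppm file for debugging.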
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014615void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014616 const camera_metadata_t &resultMetadata)
14617{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014618 if (result != nullptr) {
14619 if (result->outputBuffers.size() != 1) {
14620 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14621 result->outputBuffers.size());
14622 return;
14623 }
14624
14625 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14626 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14627 result->outputBuffers[0].streamId);
14628 return;
14629 }
14630
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014631 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014632 HdrPlusPendingRequest pendingRequest;
14633        {
14634            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14635            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find pending HDR+ request for request id %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
14636            pendingRequest = req->second;
14637        }
14638
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014639 // Update the result metadata with the settings of the HDR+ still capture request because
14640 // the result metadata belongs to a ZSL buffer.
14641 CameraMetadata metadata;
14642 metadata = &resultMetadata;
14643 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14644 camera_metadata_t* updatedResultMetadata = metadata.release();
14645
14646 QCamera3PicChannel *picChannel =
14647 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14648
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014649 // Check if dumping HDR+ YUV output is enabled.
14650 char prop[PROPERTY_VALUE_MAX];
14651 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14652 bool dumpYuvOutput = atoi(prop);
14653
14654 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014655 // Dump yuv buffer to a ppm file.
14656 pbcamera::StreamConfiguration outputConfig;
14657 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14658 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14659 if (rc == OK) {
14660 char buf[FILENAME_MAX] = {};
14661 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14662 result->requestId, result->outputBuffers[0].streamId,
14663 outputConfig.image.width, outputConfig.image.height);
14664
14665 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14666 } else {
14667 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14668 __FUNCTION__, strerror(-rc), rc);
14669 }
14670 }
14671
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014672 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14673 auto halMetadata = std::make_shared<metadata_buffer_t>();
14674 clear_metadata_buffer(halMetadata.get());
14675
14676 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14677 // encoding.
14678 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14679 halStreamId, /*minFrameDuration*/0);
14680 if (res == OK) {
14681 // Return the buffer to pic channel for encoding.
14682 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14683 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14684 halMetadata);
14685 } else {
14686 // Return the buffer without encoding.
14687 // TODO: This should not happen but we may want to report an error buffer to camera
14688 // service.
14689 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14690 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14691 strerror(-res), res);
14692 }
14693
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014694        // Find the sensor timestamp in the result metadata.
14695 camera_metadata_ro_entry_t entry;
14696 res = find_camera_metadata_ro_entry(updatedResultMetadata,
14697 ANDROID_SENSOR_TIMESTAMP, &entry);
14698 if (res != OK) {
14699 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
14700 __FUNCTION__, result->requestId, strerror(-res), res);
14701 } else {
14702 mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
14703 }
14704
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014705 // Send HDR+ metadata to framework.
14706 {
14707 pthread_mutex_lock(&mMutex);
14708
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014709 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
14710 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014711 pthread_mutex_unlock(&mMutex);
14712 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014713
14714 // Remove the HDR+ pending request.
14715 {
14716 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14717 auto req = mHdrPlusPendingRequests.find(result->requestId);
14718 mHdrPlusPendingRequests.erase(req);
14719 }
14720 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014721}
14722
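// Callback for an HDR+ capture that failed on Easel. The YUV buffer is returned to the pic
// channel, every buffer still pending for that frame number is reported to the framework as
// CAMERA3_MSG_ERROR_BUFFER followed by a capture result carrying the error buffers, and the
// pending request entries are removed.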
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014723void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14724{
14725 if (failedResult == nullptr) {
14726 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14727 return;
14728 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014729
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014730 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014731
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014732 // Remove the pending HDR+ request.
14733 {
14734 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14735 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14736
14737 // Return the buffer to pic channel.
14738 QCamera3PicChannel *picChannel =
14739 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14740 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14741
14742 mHdrPlusPendingRequests.erase(pendingRequest);
14743 }
14744
14745 pthread_mutex_lock(&mMutex);
14746
14747 // Find the pending buffers.
14748 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14749 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14750 if (pendingBuffers->frame_number == failedResult->requestId) {
14751 break;
14752 }
14753 pendingBuffers++;
14754 }
14755
14756 // Send out buffer errors for the pending buffers.
14757 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14758 std::vector<camera3_stream_buffer_t> streamBuffers;
14759 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14760 // Prepare a stream buffer.
14761 camera3_stream_buffer_t streamBuffer = {};
14762 streamBuffer.stream = buffer.stream;
14763 streamBuffer.buffer = buffer.buffer;
14764 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14765 streamBuffer.acquire_fence = -1;
14766 streamBuffer.release_fence = -1;
14767
14768 streamBuffers.push_back(streamBuffer);
14769
14770 // Send out error buffer event.
14771 camera3_notify_msg_t notify_msg = {};
14772 notify_msg.type = CAMERA3_MSG_ERROR;
14773 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14774 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14775 notify_msg.message.error.error_stream = buffer.stream;
14776
14777 orchestrateNotify(&notify_msg);
14778 }
14779
14780 camera3_capture_result_t result = {};
14781 result.frame_number = pendingBuffers->frame_number;
14782 result.num_output_buffers = streamBuffers.size();
14783 result.output_buffers = &streamBuffers[0];
14784
14785 // Send out result with buffer errors.
14786 orchestrateResult(&result);
14787
14788 // Remove pending buffers.
14789 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14790 }
14791
14792 // Remove pending request.
14793 auto halRequest = mPendingRequestsList.begin();
14794 while (halRequest != mPendingRequestsList.end()) {
14795 if (halRequest->frame_number == failedResult->requestId) {
14796 mPendingRequestsList.erase(halRequest);
14797 break;
14798 }
14799 halRequest++;
14800 }
14801
14802 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014803}
14804
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014805
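// ShutterDispatcher delivers shutter notifications to the framework in frame-number order
// (assuming mShutters is a map ordered by frame number). expectShutter() registers a frame,
// presumably when its capture request is accepted, and markShutterReady() records the timestamp
// and flushes every consecutive ready shutter from the front of the map. A minimal usage sketch
// mirroring the calls made in this file:
//
//   mShutterDispatcher.expectShutter(frameNumber);
//   // ... later, when the sensor timestamp is known (e.g. in onCaptureResult above):
//   mShutterDispatcher.markShutterReady(frameNumber, timestampNs);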
14806ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
14807 mParent(parent) {}
14808
14809void ShutterDispatcher::expectShutter(uint32_t frameNumber)
14810{
14811 std::lock_guard<std::mutex> lock(mLock);
14812 mShutters.emplace(frameNumber, Shutter());
14813}
14814
14815void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
14816{
14817 std::lock_guard<std::mutex> lock(mLock);
14818
14819 // Make this frame's shutter ready.
14820 auto shutter = mShutters.find(frameNumber);
14821 if (shutter == mShutters.end()) {
14822 // Shutter was already sent.
14823 return;
14824 }
14825
14826 shutter->second.ready = true;
14827 shutter->second.timestamp = timestamp;
14828
14829    // Iterate through the shutters in order and send them out until reaching one that's not ready yet.
14830 shutter = mShutters.begin();
14831 while (shutter != mShutters.end()) {
14832 if (!shutter->second.ready) {
14833 // If this shutter is not ready, the following shutters can't be sent.
14834 break;
14835 }
14836
14837 camera3_notify_msg_t msg = {};
14838 msg.type = CAMERA3_MSG_SHUTTER;
14839 msg.message.shutter.frame_number = shutter->first;
14840 msg.message.shutter.timestamp = shutter->second.timestamp;
14841 mParent->orchestrateNotify(&msg);
14842
14843 shutter = mShutters.erase(shutter);
14844 }
14845}
14846
14847void ShutterDispatcher::clear(uint32_t frameNumber)
14848{
14849 std::lock_guard<std::mutex> lock(mLock);
14850 mShutters.erase(frameNumber);
14851}
14852
14853void ShutterDispatcher::clear()
14854{
14855 std::lock_guard<std::mutex> lock(mLock);
14856
14857 // Log errors for stale shutters.
14858 for (auto &shutter : mShutters) {
14859        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRIu64,
14860 __FUNCTION__, shutter.first, shutter.second.ready,
14861 shutter.second.timestamp);
14862 }
14863 mShutters.clear();
14864}
14865
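// OutputBufferDispatcher does the same for output buffers on a per-stream basis (again assuming
// the per-stream map is ordered by frame number). configureStreams() creates an empty
// frame-number -> buffer map for each configured stream, expectBuffer() registers an unready
// entry for a frame, and markBufferReady() stores the buffer and flushes every consecutive
// ready buffer so each stream's buffers reach the framework in frame order.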
14866OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
14867 mParent(parent) {}
14868
14869status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
14870{
14871 std::lock_guard<std::mutex> lock(mLock);
14872 mStreamBuffers.clear();
14873 if (!streamList) {
14874 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
14875 return -EINVAL;
14876 }
14877
14878 // Create a "frame-number -> buffer" map for each stream.
14879 for (uint32_t i = 0; i < streamList->num_streams; i++) {
14880 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
14881 }
14882
14883 return OK;
14884}
14885
14886status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
14887{
14888 std::lock_guard<std::mutex> lock(mLock);
14889
14890 // Find the "frame-number -> buffer" map for the stream.
14891 auto buffers = mStreamBuffers.find(stream);
14892 if (buffers == mStreamBuffers.end()) {
14893 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
14894 return -EINVAL;
14895 }
14896
14897 // Create an unready buffer for this frame number.
14898 buffers->second.emplace(frameNumber, Buffer());
14899 return OK;
14900}
14901
14902void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
14903 const camera3_stream_buffer_t &buffer)
14904{
14905 std::lock_guard<std::mutex> lock(mLock);
14906
14907 // Find the frame number -> buffer map for the stream.
14908 auto buffers = mStreamBuffers.find(buffer.stream);
14909 if (buffers == mStreamBuffers.end()) {
14910 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
14911 return;
14912 }
14913
14914    // Find the unready buffer for this frame number and mark it ready.
14915 auto pendingBuffer = buffers->second.find(frameNumber);
14916 if (pendingBuffer == buffers->second.end()) {
14917 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
14918 return;
14919 }
14920
14921 pendingBuffer->second.ready = true;
14922 pendingBuffer->second.buffer = buffer;
14923
14924    // Iterate through the buffers in order and send them out until reaching one that's not ready yet.
14925 pendingBuffer = buffers->second.begin();
14926 while (pendingBuffer != buffers->second.end()) {
14927 if (!pendingBuffer->second.ready) {
14928 // If this buffer is not ready, the following buffers can't be sent.
14929 break;
14930 }
14931
14932 camera3_capture_result_t result = {};
14933 result.frame_number = pendingBuffer->first;
14934 result.num_output_buffers = 1;
14935 result.output_buffers = &pendingBuffer->second.buffer;
14936
14937        // Send out the result with the ready buffer.
14938 mParent->orchestrateResult(&result);
14939
14940 pendingBuffer = buffers->second.erase(pendingBuffer);
14941 }
14942}
14943
14944void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
14945{
14946 std::lock_guard<std::mutex> lock(mLock);
14947
14948 // Log errors for stale buffers.
14949 for (auto &buffers : mStreamBuffers) {
14950 for (auto &buffer : buffers.second) {
14951 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
14952 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
14953 }
14954 buffers.second.clear();
14955 }
14956
14957 if (clearConfiguredStreams) {
14958 mStreamBuffers.clear();
14959 }
14960}
14961
Thierry Strudel3d639192016-09-09 11:52:26 -070014962}; //end namespace qcamera