/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

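// Convenience accessor for the buffer pointer at a given index inside a camera heap
// memory object (see the m_pDualCamCmdHeap usage in openCamera() below).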
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
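// Computes the number of entries in the statically-sized mapping tables defined below.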
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

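// Per-camera capability and cached static metadata tables, indexed by camera ID.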
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

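// Typical access pattern for the Easel globals above (see openCamera()/closeCamera()):
//     Mutex::Autolock l(gHdrPlusClientLock);
//     ... use gEaselManagerClient / gHdrPlusClient ...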

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

Thierry Strudel3d639192016-09-09 11:52:26 -0700394camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
395 .initialize = QCamera3HardwareInterface::initialize,
396 .configure_streams = QCamera3HardwareInterface::configure_streams,
397 .register_stream_buffers = NULL,
398 .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
399 .process_capture_request = QCamera3HardwareInterface::process_capture_request,
400 .get_metadata_vendor_tag_ops = NULL,
401 .dump = QCamera3HardwareInterface::dump,
402 .flush = QCamera3HardwareInterface::flush,
403 .reserved = {0},
404};
405
406// initialise to some default value
407uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
408
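// Logs an Easel event tagged with the current CLOCK_BOOTTIME timestamp in milliseconds.
// Does nothing unless gEaselProfilingEnabled is set.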
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
        case CAM_EVENT_TYPE_DAEMON_DIED:
            pthread_mutex_lock(&obj->mMutex);
            obj->mState = ERROR;
            pthread_mutex_unlock(&obj->mMutex);
            LOGE("Fatal, camera daemon died");
            break;

        case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
            LOGD("HAL got request pull from Daemon");
            pthread_mutex_lock(&obj->mMutex);
            obj->mWokenUpByDaemon = true;
            obj->unblockRequestIfNecessary();
            pthread_mutex_unlock(&obj->mMutex);
            break;

        default:
            LOGW("Warning: Unhandled event %d",
                    evt->server_event_type);
            break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configurations are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}
1349
1350/*===========================================================================
1351 * FUNCTION : validateUsageFlagsForEis
1352 *
1353 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1354 *
1355 * PARAMETERS :
1356 * @stream_list : streams to be configured
1357 *
1358 * RETURN :
1359 * NO_ERROR if the usage flags are supported
1360 * error code if usage flags are not supported
1361 *
1362 *==========================================================================*/
1363int QCamera3HardwareInterface::validateUsageFlagsForEis(
1364 const camera3_stream_configuration_t* streamList)
1365{
1366 for (size_t j = 0; j < streamList->num_streams; j++) {
1367 const camera3_stream_t *newStream = streamList->streams[j];
1368
1369 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1370 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1371
1372 // Because EIS is "hard-coded" for certain use cases, and the current
1373 // implementation doesn't support preview and video sharing the same
1374 // stream, return failure if EIS is forced on.
1375 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1376 LOGE("Combined video and preview usage flag is not supported due to EIS");
1377 return -EINVAL;
1378 }
1379 }
1380 return NO_ERROR;
1381}
1382
Thierry Strudel3d639192016-09-09 11:52:26 -07001383/*==============================================================================
1384 * FUNCTION : isSupportChannelNeeded
1385 *
1386 * DESCRIPTION: Simple heuristic func to determine if a support channel is needed
1387 *
1388 * PARAMETERS :
1389 * @stream_list : streams to be configured
1390 * @stream_config_info : the config info for streams to be configured
1391 *
1392 * RETURN : Boolean true/false decision
1393 *
1394 *==========================================================================*/
1395bool QCamera3HardwareInterface::isSupportChannelNeeded(
1396 camera3_stream_configuration_t *streamList,
1397 cam_stream_size_info_t stream_config_info)
1398{
1399 uint32_t i;
1400 bool pprocRequested = false;
1401 /* Check for conditions where PProc pipeline does not have any streams*/
1402 for (i = 0; i < stream_config_info.num_streams; i++) {
1403 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1404 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1405 pprocRequested = true;
1406 break;
1407 }
1408 }
1409
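    /* No configured stream requests post-processing, so a dummy support
     * channel is needed to keep the PProc pipeline populated */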
1410 if (pprocRequested == false )
1411 return true;
1412
1413 /* Dummy stream needed if only raw or jpeg streams present */
1414 for (i = 0; i < streamList->num_streams; i++) {
1415 switch(streamList->streams[i]->format) {
1416 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1417 case HAL_PIXEL_FORMAT_RAW10:
1418 case HAL_PIXEL_FORMAT_RAW16:
1419 case HAL_PIXEL_FORMAT_BLOB:
1420 break;
1421 default:
1422 return false;
1423 }
1424 }
1425 return true;
1426}
1427
1428/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001429 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001430 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001431 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001432 *
1433 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001434 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001435 *
1436 * RETURN : int32_t type of status
1437 * NO_ERROR -- success
1438 * non-zero failure code
1439 *
1440 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442{
1443 int32_t rc = NO_ERROR;
1444
1445 cam_dimension_t max_dim = {0, 0};
1446 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1447 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1448 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1449 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1450 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1451 }
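    // The largest configured stream dimension is reported via CAM_INTF_PARM_MAX_DIMENSION
    // so the backend can choose a sensor mode large enough to cover every output stream.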
1452
1453 clear_metadata_buffer(mParameters);
1454
1455 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1456 max_dim);
1457 if (rc != NO_ERROR) {
1458 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1459 return rc;
1460 }
1461
1462 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1463 if (rc != NO_ERROR) {
1464 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1465 return rc;
1466 }
1467
1468 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001469 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001470
1471 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1472 mParameters);
1473 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001475 return rc;
1476 }
1477
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001478 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001479 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1480 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1481 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1482 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1483 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001484
1485 return rc;
1486}
1487
1488/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001489 * FUNCTION : addToPPFeatureMask
1490 *
1491 * DESCRIPTION: add additional features to pp feature mask based on
1492 * stream type and usecase
1493 *
1494 * PARAMETERS :
1495 * @stream_format : stream type for feature mask
1496 * @stream_idx : stream idx within postprocess_mask list to change
1497 *
1498 * RETURN : None
1499 *
1500 *==========================================================================*/
1501void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1502 uint32_t stream_idx)
1503{
1504 char feature_mask_value[PROPERTY_VALUE_MAX];
1505 cam_feature_mask_t feature_mask;
1506 int args_converted;
1507 int property_len;
1508
1509 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001510#ifdef _LE_CAMERA_
1511 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1512 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1513 property_len = property_get("persist.camera.hal3.feature",
1514 feature_mask_value, swtnr_feature_mask_value);
1515#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001516 property_len = property_get("persist.camera.hal3.feature",
1517 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001518#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001519 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1520 (feature_mask_value[1] == 'x')) {
1521 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1522 } else {
1523 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1524 }
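    // Illustrative examples only (actual bit values come from the cam_feature_mask_t
    // definitions, e.g. CAM_QTI_FEATURE_SW_TNR); the property accepts hex or decimal:
    //   adb shell setprop persist.camera.hal3.feature 0x1000
    //   adb shell setprop persist.camera.hal3.feature 4096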
1525 if (1 != args_converted) {
1526 feature_mask = 0;
1527 LOGE("Wrong feature mask %s", feature_mask_value);
1528 return;
1529 }
1530
1531 switch (stream_format) {
1532 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1533 /* Add SW TNR or LLVD to the pp feature mask only for video use cases */
1534 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1535 mStreamConfigInfo.postprocess_mask[stream_idx]
1536 |= CAM_QTI_FEATURE_SW_TNR;
1537 LOGH("Added SW TNR to pp feature mask");
1538 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1539 mStreamConfigInfo.postprocess_mask[stream_idx]
1540 |= CAM_QCOM_FEATURE_LLVD;
1541 LOGH("Added LLVD SeeMore to pp feature mask");
1542 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001543 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1544 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1545 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1546 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001547 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1548 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1549 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1550 CAM_QTI_FEATURE_BINNING_CORRECTION;
1551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001552 break;
1553 }
1554 default:
1555 break;
1556 }
1557 LOGD("PP feature mask %llx",
1558 mStreamConfigInfo.postprocess_mask[stream_idx]);
1559}
1560
1561/*==============================================================================
1562 * FUNCTION : updateFpsInPreviewBuffer
1563 *
1564 * DESCRIPTION: update FPS information in preview buffer.
1565 *
1566 * PARAMETERS :
1567 * @metadata : pointer to metadata buffer
1568 * @frame_number: frame_number to look for in pending buffer list
1569 *
1570 * RETURN : None
1571 *
1572 *==========================================================================*/
1573void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1574 uint32_t frame_number)
1575{
1576 // Mark all pending buffers for this particular request
1577 // with corresponding framerate information
1578 for (List<PendingBuffersInRequest>::iterator req =
1579 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1580 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1581 for(List<PendingBufferInfo>::iterator j =
1582 req->mPendingBufferList.begin();
1583 j != req->mPendingBufferList.end(); j++) {
1584 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1585 if ((req->frame_number == frame_number) &&
1586 (channel->getStreamTypeMask() &
1587 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1588 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1589 CAM_INTF_PARM_FPS_RANGE, metadata) {
1590 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1591 struct private_handle_t *priv_handle =
1592 (struct private_handle_t *)(*(j->buffer));
1593 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1594 }
1595 }
1596 }
1597 }
1598}
1599
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001600/*==============================================================================
1601 * FUNCTION : updateTimeStampInPendingBuffers
1602 *
1603 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1604 * of a frame number
1605 *
1606 * PARAMETERS :
1607 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1608 * @timestamp : timestamp to be set
1609 *
1610 * RETURN : None
1611 *
1612 *==========================================================================*/
1613void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1614 uint32_t frameNumber, nsecs_t timestamp)
1615{
1616 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1617 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1618 if (req->frame_number != frameNumber)
1619 continue;
1620
1621 for (auto k = req->mPendingBufferList.begin();
1622 k != req->mPendingBufferList.end(); k++ ) {
1623 struct private_handle_t *priv_handle =
1624 (struct private_handle_t *) (*(k->buffer));
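            // SET_VT_TIMESTAMP pushes the capture timestamp into the buffer's display
            // metadata; judging by the key name, this is consumed for video-telephony
            // style A/V sync on the display side.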
1625 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1626 }
1627 }
1628 return;
1629}
1630
Thierry Strudel3d639192016-09-09 11:52:26 -07001631/*===========================================================================
1632 * FUNCTION : configureStreams
1633 *
1634 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1635 * and output streams.
1636 *
1637 * PARAMETERS :
1638 * @stream_list : streams to be configured
1639 *
1640 * RETURN : int type of status (NO_ERROR on success, error code otherwise)
1641 *
1642 *==========================================================================*/
1643int QCamera3HardwareInterface::configureStreams(
1644 camera3_stream_configuration_t *streamList)
1645{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001646 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001647 int rc = 0;
1648
1649 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001650 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001651 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001652 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001653
1654 return rc;
1655}
1656
1657/*===========================================================================
1658 * FUNCTION : configureStreamsPerfLocked
1659 *
1660 * DESCRIPTION: configureStreams while perfLock is held.
1661 *
1662 * PARAMETERS :
1663 * @stream_list : streams to be configured
1664 *
1665 * RETURN : int32_t type of status
1666 * NO_ERROR -- success
1667 * non-zero failure code
1668 *==========================================================================*/
1669int QCamera3HardwareInterface::configureStreamsPerfLocked(
1670 camera3_stream_configuration_t *streamList)
1671{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001672 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001673 int rc = 0;
1674
1675 // Sanity check stream_list
1676 if (streamList == NULL) {
1677 LOGE("NULL stream configuration");
1678 return BAD_VALUE;
1679 }
1680 if (streamList->streams == NULL) {
1681 LOGE("NULL stream list");
1682 return BAD_VALUE;
1683 }
1684
1685 if (streamList->num_streams < 1) {
1686 LOGE("Bad number of streams requested: %d",
1687 streamList->num_streams);
1688 return BAD_VALUE;
1689 }
1690
1691 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1692 LOGE("Maximum number of streams %d exceeded: %d",
1693 MAX_NUM_STREAMS, streamList->num_streams);
1694 return BAD_VALUE;
1695 }
1696
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001697 rc = validateUsageFlags(streamList);
1698 if (rc != NO_ERROR) {
1699 return rc;
1700 }
1701
Thierry Strudel3d639192016-09-09 11:52:26 -07001702 mOpMode = streamList->operation_mode;
1703 LOGD("mOpMode: %d", mOpMode);
1704
1705 /* first invalidate all the streams in mStreamInfo
1706 * if they appear again, they will be validated */
1707 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1708 it != mStreamInfo.end(); it++) {
1709 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1710 if (channel) {
1711 channel->stop();
1712 }
1713 (*it)->status = INVALID;
1714 }
1715
1716 if (mRawDumpChannel) {
1717 mRawDumpChannel->stop();
1718 delete mRawDumpChannel;
1719 mRawDumpChannel = NULL;
1720 }
1721
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001722 if (mHdrPlusRawSrcChannel) {
1723 mHdrPlusRawSrcChannel->stop();
1724 delete mHdrPlusRawSrcChannel;
1725 mHdrPlusRawSrcChannel = NULL;
1726 }
1727
Thierry Strudel3d639192016-09-09 11:52:26 -07001728 if (mSupportChannel)
1729 mSupportChannel->stop();
1730
1731 if (mAnalysisChannel) {
1732 mAnalysisChannel->stop();
1733 }
1734 if (mMetadataChannel) {
1735 /* If mStreamInfo is not empty, there is a metadata stream */
1736 mMetadataChannel->stop();
1737 }
1738 if (mChannelHandle) {
1739 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1740 mChannelHandle);
1741 LOGD("stopping channel %d", mChannelHandle);
1742 }
1743
1744 pthread_mutex_lock(&mMutex);
1745
1746 // Check state
1747 switch (mState) {
1748 case INITIALIZED:
1749 case CONFIGURED:
1750 case STARTED:
1751 /* valid state */
1752 break;
1753 default:
1754 LOGE("Invalid state %d", mState);
1755 pthread_mutex_unlock(&mMutex);
1756 return -ENODEV;
1757 }
1758
1759 /* Check whether we have video stream */
1760 m_bIs4KVideo = false;
1761 m_bIsVideo = false;
1762 m_bEisSupportedSize = false;
1763 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001764 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001765 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001766 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001767 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001768 uint32_t videoWidth = 0U;
1769 uint32_t videoHeight = 0U;
1770 size_t rawStreamCnt = 0;
1771 size_t stallStreamCnt = 0;
1772 size_t processedStreamCnt = 0;
1773 // Number of streams on ISP encoder path
1774 size_t numStreamsOnEncoder = 0;
1775 size_t numYuv888OnEncoder = 0;
1776 bool bYuv888OverrideJpeg = false;
1777 cam_dimension_t largeYuv888Size = {0, 0};
1778 cam_dimension_t maxViewfinderSize = {0, 0};
1779 bool bJpegExceeds4K = false;
1780 bool bJpegOnEncoder = false;
1781 bool bUseCommonFeatureMask = false;
1782 cam_feature_mask_t commonFeatureMask = 0;
1783 bool bSmallJpegSize = false;
1784 uint32_t width_ratio;
1785 uint32_t height_ratio;
1786 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1787 camera3_stream_t *inputStream = NULL;
1788 bool isJpeg = false;
1789 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001790 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001791 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001792
1793 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1794
1795 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001796 uint8_t eis_prop_set;
1797 uint32_t maxEisWidth = 0;
1798 uint32_t maxEisHeight = 0;
1799
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001800 // Initialize all instant AEC related variables
1801 mInstantAEC = false;
1802 mResetInstantAEC = false;
1803 mInstantAECSettledFrameNumber = 0;
1804 mAecSkipDisplayFrameBound = 0;
1805 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001806 mCurrFeatureState = 0;
1807 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001808
Thierry Strudel3d639192016-09-09 11:52:26 -07001809 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1810
1811 size_t count = IS_TYPE_MAX;
1812 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1813 for (size_t i = 0; i < count; i++) {
1814 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001815 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1816 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 break;
1818 }
1819 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001820
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001821 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001822 maxEisWidth = MAX_EIS_WIDTH;
1823 maxEisHeight = MAX_EIS_HEIGHT;
1824 }
1825
1826 /* EIS setprop control */
1827 char eis_prop[PROPERTY_VALUE_MAX];
1828 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001829 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 eis_prop_set = (uint8_t)atoi(eis_prop);
1831
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001832 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001833 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1834
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001835 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1836 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001837
Thierry Strudel3d639192016-09-09 11:52:26 -07001838 /* stream configurations */
1839 for (size_t i = 0; i < streamList->num_streams; i++) {
1840 camera3_stream_t *newStream = streamList->streams[i];
1841 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1842 "height = %d, rotation = %d, usage = 0x%x",
1843 i, newStream->stream_type, newStream->format,
1844 newStream->width, newStream->height, newStream->rotation,
1845 newStream->usage);
1846 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1847 newStream->stream_type == CAMERA3_STREAM_INPUT){
1848 isZsl = true;
1849 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001850 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1851 IS_USAGE_PREVIEW(newStream->usage)) {
1852 isPreview = true;
1853 }
1854
Thierry Strudel3d639192016-09-09 11:52:26 -07001855 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1856 inputStream = newStream;
1857 }
1858
Emilian Peev7650c122017-01-19 08:24:33 -08001859 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1860 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001861 isJpeg = true;
1862 jpegSize.width = newStream->width;
1863 jpegSize.height = newStream->height;
1864 if (newStream->width > VIDEO_4K_WIDTH ||
1865 newStream->height > VIDEO_4K_HEIGHT)
1866 bJpegExceeds4K = true;
1867 }
1868
1869 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1870 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1871 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001872 // In HAL3 we can have multiple different video streams.
1873 // The variables video width and height are used below as
1874 // dimensions of the biggest of them
1875 if (videoWidth < newStream->width ||
1876 videoHeight < newStream->height) {
1877 videoWidth = newStream->width;
1878 videoHeight = newStream->height;
1879 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001880 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1881 (VIDEO_4K_HEIGHT <= newStream->height)) {
1882 m_bIs4KVideo = true;
1883 }
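                // EIS can only be applied when the video stream fits within the
                // maximum supported EIS resolution.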
1884 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1885 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001886
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 }
1888 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1889 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1890 switch (newStream->format) {
1891 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001892 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1893 depthPresent = true;
1894 break;
1895 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001896 stallStreamCnt++;
1897 if (isOnEncoder(maxViewfinderSize, newStream->width,
1898 newStream->height)) {
1899 numStreamsOnEncoder++;
1900 bJpegOnEncoder = true;
1901 }
1902 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1903 newStream->width);
1904 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1905 newStream->height);
1906 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1907 "FATAL: max_downscale_factor cannot be zero and so assert");
1908 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1909 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1910 LOGH("Setting small jpeg size flag to true");
1911 bSmallJpegSize = true;
1912 }
1913 break;
1914 case HAL_PIXEL_FORMAT_RAW10:
1915 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1916 case HAL_PIXEL_FORMAT_RAW16:
1917 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001918 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1919 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1920 pdStatCount++;
1921 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001922 break;
1923 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1924 processedStreamCnt++;
1925 if (isOnEncoder(maxViewfinderSize, newStream->width,
1926 newStream->height)) {
1927 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1928 !IS_USAGE_ZSL(newStream->usage)) {
1929 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1930 }
1931 numStreamsOnEncoder++;
1932 }
1933 break;
1934 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1935 processedStreamCnt++;
1936 if (isOnEncoder(maxViewfinderSize, newStream->width,
1937 newStream->height)) {
1938 // If Yuv888 size is not greater than 4K, set feature mask
1939 // to SUPERSET so that it support concurrent request on
1940 // YUV and JPEG.
1941 if (newStream->width <= VIDEO_4K_WIDTH &&
1942 newStream->height <= VIDEO_4K_HEIGHT) {
1943 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1944 }
1945 numStreamsOnEncoder++;
1946 numYuv888OnEncoder++;
1947 largeYuv888Size.width = newStream->width;
1948 largeYuv888Size.height = newStream->height;
1949 }
1950 break;
1951 default:
1952 processedStreamCnt++;
1953 if (isOnEncoder(maxViewfinderSize, newStream->width,
1954 newStream->height)) {
1955 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1956 numStreamsOnEncoder++;
1957 }
1958 break;
1959 }
1960
1961 }
1962 }
1963
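    // Keep EIS enabled only for back-camera configurations that contain a video stream.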
1964 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1965 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1966 !m_bIsVideo) {
1967 m_bEisEnable = false;
1968 }
1969
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001970 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1971 pthread_mutex_unlock(&mMutex);
1972 return -EINVAL;
1973 }
1974
Thierry Strudel54dc9782017-02-15 12:12:10 -08001975 uint8_t forceEnableTnr = 0;
1976 char tnr_prop[PROPERTY_VALUE_MAX];
1977 memset(tnr_prop, 0, sizeof(tnr_prop));
1978 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1979 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1980
Thierry Strudel3d639192016-09-09 11:52:26 -07001981 /* Enable TNR only for 1080p/720p video recording outside constrained high-speed
1982 mode, or when forced via debug.camera.tnr.forceenable */
1982 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1983 ((videoWidth == 1920 && videoHeight == 1080) ||
1984 (videoWidth == 1280 && videoHeight == 720)) &&
1985 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1986 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001987 else if (forceEnableTnr)
1988 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001989
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001990 char videoHdrProp[PROPERTY_VALUE_MAX];
1991 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1992 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1993 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1994
1995 if (hdr_mode_prop == 1 && m_bIsVideo &&
1996 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1997 m_bVideoHdrEnabled = true;
1998 else
1999 m_bVideoHdrEnabled = false;
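    // Video HDR is opt-in via persist.camera.hdr.video and applies only to
    // non-HFR video sessions.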
2000
2001
Thierry Strudel3d639192016-09-09 11:52:26 -07002002 /* Check if num_streams is sane */
2003 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2004 rawStreamCnt > MAX_RAW_STREAMS ||
2005 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2006 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2007 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2008 pthread_mutex_unlock(&mMutex);
2009 return -EINVAL;
2010 }
2011 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002012 if (isZsl && m_bIs4KVideo) {
2013 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002014 pthread_mutex_unlock(&mMutex);
2015 return -EINVAL;
2016 }
2017 /* Check if stream sizes are sane */
2018 if (numStreamsOnEncoder > 2) {
2019 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2020 pthread_mutex_unlock(&mMutex);
2021 return -EINVAL;
2022 } else if (1 < numStreamsOnEncoder){
2023 bUseCommonFeatureMask = true;
2024 LOGH("Multiple streams above max viewfinder size, common mask needed");
2025 }
2026
2027 /* Check if BLOB size is greater than 4k in 4k recording case */
2028 if (m_bIs4KVideo && bJpegExceeds4K) {
2029 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2030 pthread_mutex_unlock(&mMutex);
2031 return -EINVAL;
2032 }
2033
Emilian Peev7650c122017-01-19 08:24:33 -08002034 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2035 depthPresent) {
2036 LOGE("HAL doesn't support depth streams in HFR mode!");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Thierry Strudel3d639192016-09-09 11:52:26 -07002041 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2042 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2043 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2044 // is not true. Otherwise testMandatoryOutputCombinations will fail with the following
2045 // configurations:
2046 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2047 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2048 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2049 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2050 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2051 __func__);
2052 pthread_mutex_unlock(&mMutex);
2053 return -EINVAL;
2054 }
2055
2056 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2057 // the YUV stream's size is greater or equal to the JPEG size, set common
2058 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2059 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2060 jpegSize.width, jpegSize.height) &&
2061 largeYuv888Size.width > jpegSize.width &&
2062 largeYuv888Size.height > jpegSize.height) {
2063 bYuv888OverrideJpeg = true;
2064 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2065 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2066 }
2067
2068 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2069 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2070 commonFeatureMask);
2071 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2072 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2073
2074 rc = validateStreamDimensions(streamList);
2075 if (rc == NO_ERROR) {
2076 rc = validateStreamRotations(streamList);
2077 }
2078 if (rc != NO_ERROR) {
2079 LOGE("Invalid stream configuration requested!");
2080 pthread_mutex_unlock(&mMutex);
2081 return rc;
2082 }
2083
Emilian Peev0f3c3162017-03-15 12:57:46 +00002084 if (1 < pdStatCount) {
2085 LOGE("HAL doesn't support multiple PD streams");
2086 pthread_mutex_unlock(&mMutex);
2087 return -EINVAL;
2088 }
2089
2090 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2091 (1 == pdStatCount)) {
2092 LOGE("HAL doesn't support PD streams in HFR mode!");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2098 for (size_t i = 0; i < streamList->num_streams; i++) {
2099 camera3_stream_t *newStream = streamList->streams[i];
2100 LOGH("newStream type = %d, stream format = %d "
2101 "stream size : %d x %d, stream rotation = %d",
2102 newStream->stream_type, newStream->format,
2103 newStream->width, newStream->height, newStream->rotation);
2104 //if the stream is in the mStreamList validate it
2105 bool stream_exists = false;
2106 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2107 it != mStreamInfo.end(); it++) {
2108 if ((*it)->stream == newStream) {
2109 QCamera3ProcessingChannel *channel =
2110 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2111 stream_exists = true;
2112 if (channel)
2113 delete channel;
2114 (*it)->status = VALID;
2115 (*it)->stream->priv = NULL;
2116 (*it)->channel = NULL;
2117 }
2118 }
2119 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2120 //new stream
2121 stream_info_t* stream_info;
2122 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2123 if (!stream_info) {
2124 LOGE("Could not allocate stream info");
2125 rc = -ENOMEM;
2126 pthread_mutex_unlock(&mMutex);
2127 return rc;
2128 }
2129 stream_info->stream = newStream;
2130 stream_info->status = VALID;
2131 stream_info->channel = NULL;
2132 mStreamInfo.push_back(stream_info);
2133 }
2134 /* Covers Opaque ZSL and API1 F/W ZSL */
2135 if (IS_USAGE_ZSL(newStream->usage)
2136 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2137 if (zslStream != NULL) {
2138 LOGE("Multiple input/reprocess streams requested!");
2139 pthread_mutex_unlock(&mMutex);
2140 return BAD_VALUE;
2141 }
2142 zslStream = newStream;
2143 }
2144 /* Covers YUV reprocess */
2145 if (inputStream != NULL) {
2146 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2147 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2148 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2149 && inputStream->width == newStream->width
2150 && inputStream->height == newStream->height) {
2151 if (zslStream != NULL) {
2152 /* This scenario indicates that multiple YUV streams with the same size
2153 * as the input stream have been requested. Since the zsl stream handle
2154 * is solely used to override the size of streams that share h/w streams,
2155 * we just make a guess here as to which of the streams is the ZSL
2156 * stream. This will be refactored once we have generic logic for
2157 * streams sharing encoder output
2158 */
2159 LOGH("Warning, Multiple ip/reprocess streams requested!");
2160 }
2161 zslStream = newStream;
2162 }
2163 }
2164 }
2165
2166 /* If a zsl stream is set, we know that we have configured at least one input or
2167 bidirectional stream */
2168 if (NULL != zslStream) {
2169 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2170 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2171 mInputStreamInfo.format = zslStream->format;
2172 mInputStreamInfo.usage = zslStream->usage;
2173 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2174 mInputStreamInfo.dim.width,
2175 mInputStreamInfo.dim.height,
2176 mInputStreamInfo.format, mInputStreamInfo.usage);
2177 }
2178
2179 cleanAndSortStreamInfo();
2180 if (mMetadataChannel) {
2181 delete mMetadataChannel;
2182 mMetadataChannel = NULL;
2183 }
2184 if (mSupportChannel) {
2185 delete mSupportChannel;
2186 mSupportChannel = NULL;
2187 }
2188
2189 if (mAnalysisChannel) {
2190 delete mAnalysisChannel;
2191 mAnalysisChannel = NULL;
2192 }
2193
2194 if (mDummyBatchChannel) {
2195 delete mDummyBatchChannel;
2196 mDummyBatchChannel = NULL;
2197 }
2198
Emilian Peev7650c122017-01-19 08:24:33 -08002199 if (mDepthChannel) {
2200 mDepthChannel = NULL;
2201 }
2202
Thierry Strudel2896d122017-02-23 19:18:03 -08002203 char is_type_value[PROPERTY_VALUE_MAX];
2204 property_get("persist.camera.is_type", is_type_value, "4");
2205 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
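    // persist.camera.is_type selects the image stabilization type; the default of "4"
    // appears intended to match IS_TYPE_EIS_3_0, enabling the EIS 3.0 specific paths below.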
2206
Thierry Strudel3d639192016-09-09 11:52:26 -07002207 //Create metadata channel and initialize it
2208 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2209 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2210 gCamCapability[mCameraId]->color_arrangement);
2211 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2212 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002213 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002214 if (mMetadataChannel == NULL) {
2215 LOGE("failed to allocate metadata channel");
2216 rc = -ENOMEM;
2217 pthread_mutex_unlock(&mMutex);
2218 return rc;
2219 }
2220 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2221 if (rc < 0) {
2222 LOGE("metadata channel initialization failed");
2223 delete mMetadataChannel;
2224 mMetadataChannel = NULL;
2225 pthread_mutex_unlock(&mMutex);
2226 return rc;
2227 }
2228
Thierry Strudel2896d122017-02-23 19:18:03 -08002229 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002230 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002231 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002232 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2233 /* Allocate channel objects for the requested streams */
2234 for (size_t i = 0; i < streamList->num_streams; i++) {
2235 camera3_stream_t *newStream = streamList->streams[i];
2236 uint32_t stream_usage = newStream->usage;
2237 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2238 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2239 struct camera_info *p_info = NULL;
2240 pthread_mutex_lock(&gCamLock);
2241 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2242 pthread_mutex_unlock(&gCamLock);
2243 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2244 || IS_USAGE_ZSL(newStream->usage)) &&
2245 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002248 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2249 if (bUseCommonFeatureMask)
2250 zsl_ppmask = commonFeatureMask;
2251 else
2252 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002253 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002254 if (numStreamsOnEncoder > 0)
2255 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2256 else
2257 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002259 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002261 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002262 LOGH("Input stream configured, reprocess config");
2263 } else {
2264 //for non zsl streams find out the format
2265 switch (newStream->format) {
2266 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2267 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2270 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2271 /* add additional features to pp feature mask */
2272 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2273 mStreamConfigInfo.num_streams);
2274
2275 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2276 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2277 CAM_STREAM_TYPE_VIDEO;
2278 if (m_bTnrEnabled && m_bTnrVideo) {
2279 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2280 CAM_QCOM_FEATURE_CPP_TNR;
2281 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2282 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2283 ~CAM_QCOM_FEATURE_CDS;
2284 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002285 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2286 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2287 CAM_QTI_FEATURE_PPEISCORE;
2288 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002289 } else {
2290 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2291 CAM_STREAM_TYPE_PREVIEW;
2292 if (m_bTnrEnabled && m_bTnrPreview) {
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2294 CAM_QCOM_FEATURE_CPP_TNR;
2295 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2297 ~CAM_QCOM_FEATURE_CDS;
2298 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002299 if(!m_bSwTnrPreview) {
2300 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2301 ~CAM_QTI_FEATURE_SW_TNR;
2302 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002303 padding_info.width_padding = mSurfaceStridePadding;
2304 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002305 previewSize.width = (int32_t)newStream->width;
2306 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002307 }
2308 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2309 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2310 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2311 newStream->height;
2312 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2313 newStream->width;
2314 }
2315 }
2316 break;
2317 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002318 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002319 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2320 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2321 if (bUseCommonFeatureMask)
2322 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2323 commonFeatureMask;
2324 else
2325 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2326 CAM_QCOM_FEATURE_NONE;
2327 } else {
2328 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2329 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2330 }
2331 break;
2332 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002333 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2335 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2336 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2338 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002339 /* Remove rotation if it is not supported
2340 for 4K LiveVideo snapshot case (online processing) */
2341 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2342 CAM_QCOM_FEATURE_ROTATION)) {
2343 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2344 &= ~CAM_QCOM_FEATURE_ROTATION;
2345 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002346 } else {
2347 if (bUseCommonFeatureMask &&
2348 isOnEncoder(maxViewfinderSize, newStream->width,
2349 newStream->height)) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2351 } else {
2352 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2353 }
2354 }
2355 if (isZsl) {
2356 if (zslStream) {
2357 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2358 (int32_t)zslStream->width;
2359 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2360 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002361 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2362 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002363 } else {
2364 LOGE("Error, No ZSL stream identified");
2365 pthread_mutex_unlock(&mMutex);
2366 return -EINVAL;
2367 }
2368 } else if (m_bIs4KVideo) {
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2370 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2371 } else if (bYuv888OverrideJpeg) {
2372 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2373 (int32_t)largeYuv888Size.width;
2374 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2375 (int32_t)largeYuv888Size.height;
2376 }
2377 break;
2378 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2379 case HAL_PIXEL_FORMAT_RAW16:
2380 case HAL_PIXEL_FORMAT_RAW10:
2381 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2382 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2383 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002384 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2385 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2386 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2387 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2388 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2389 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2390 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2391 gCamCapability[mCameraId]->dt[mPDIndex];
2392 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2393 gCamCapability[mCameraId]->vc[mPDIndex];
2394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 break;
2396 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002397 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002398 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2399 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2400 break;
2401 }
2402 }
2403
2404 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2405 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2406 gCamCapability[mCameraId]->color_arrangement);
2407
2408 if (newStream->priv == NULL) {
2409 //New stream, construct channel
2410 switch (newStream->stream_type) {
2411 case CAMERA3_STREAM_INPUT:
2412 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2413 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2414 break;
2415 case CAMERA3_STREAM_BIDIRECTIONAL:
2416 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2417 GRALLOC_USAGE_HW_CAMERA_WRITE;
2418 break;
2419 case CAMERA3_STREAM_OUTPUT:
2420 /* For video encoding stream, set read/write rarely
2421 * flag so that they may be set to un-cached */
2422 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2423 newStream->usage |=
2424 (GRALLOC_USAGE_SW_READ_RARELY |
2425 GRALLOC_USAGE_SW_WRITE_RARELY |
2426 GRALLOC_USAGE_HW_CAMERA_WRITE);
2427 else if (IS_USAGE_ZSL(newStream->usage))
2428 {
2429 LOGD("ZSL usage flag skipping");
2430 }
2431 else if (newStream == zslStream
2432 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2433 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2434 } else
2435 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2436 break;
2437 default:
2438 LOGE("Invalid stream_type %d", newStream->stream_type);
2439 break;
2440 }
2441
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002442 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002443 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2444 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2445 QCamera3ProcessingChannel *channel = NULL;
2446 switch (newStream->format) {
2447 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2448 if ((newStream->usage &
2449 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2450 (streamList->operation_mode ==
2451 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2452 ) {
2453 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2454 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002455 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 this,
2457 newStream,
2458 (cam_stream_type_t)
2459 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2461 mMetadataChannel,
2462 0); //heap buffers are not required for HFR video channel
2463 if (channel == NULL) {
2464 LOGE("allocation of channel failed");
2465 pthread_mutex_unlock(&mMutex);
2466 return -ENOMEM;
2467 }
2468 //channel->getNumBuffers() will return 0 here so use
2469 //MAX_INFLIGHT_HFR_REQUESTS
2470 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2471 newStream->priv = channel;
2472 LOGI("num video buffers in HFR mode: %d",
2473 MAX_INFLIGHT_HFR_REQUESTS);
2474 } else {
2475 /* Copy stream contents in HFR preview only case to create
2476 * dummy batch channel so that sensor streaming is in
2477 * HFR mode */
2478 if (!m_bIsVideo && (streamList->operation_mode ==
2479 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2480 mDummyBatchStream = *newStream;
2481 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002482 int bufferCount = MAX_INFLIGHT_REQUESTS;
2483 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2484 CAM_STREAM_TYPE_VIDEO) {
2485 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2486 bufferCount = MAX_VIDEO_BUFFERS;
2487 }
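                    // A deeper buffer queue (MAX_VIDEO_BUFFERS) is used for video when
                    // EIS 3.0 is requested, presumably because EIS 3.0 holds on to extra
                    // frames for stabilization.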
Thierry Strudel3d639192016-09-09 11:52:26 -07002488 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2489 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002490 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002491 this,
2492 newStream,
2493 (cam_stream_type_t)
2494 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2495 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2496 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002497 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 if (channel == NULL) {
2499 LOGE("allocation of channel failed");
2500 pthread_mutex_unlock(&mMutex);
2501 return -ENOMEM;
2502 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002503 /* disable UBWC for preview, though supported,
2504 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002505 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002506 (previewSize.width == (int32_t)videoWidth)&&
2507 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002508 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002510 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 newStream->max_buffers = channel->getNumBuffers();
2512 newStream->priv = channel;
2513 }
2514 break;
2515 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2516 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2517 mChannelHandle,
2518 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002519 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002520 this,
2521 newStream,
2522 (cam_stream_type_t)
2523 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2524 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2525 mMetadataChannel);
2526 if (channel == NULL) {
2527 LOGE("allocation of YUV channel failed");
2528 pthread_mutex_unlock(&mMutex);
2529 return -ENOMEM;
2530 }
2531 newStream->max_buffers = channel->getNumBuffers();
2532 newStream->priv = channel;
2533 break;
2534 }
2535 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2536 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002537 case HAL_PIXEL_FORMAT_RAW10: {
2538 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2539 (HAL_DATASPACE_DEPTH != newStream->data_space))
2540 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002541 mRawChannel = new QCamera3RawChannel(
2542 mCameraHandle->camera_handle, mChannelHandle,
2543 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002544 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002545 this, newStream,
2546 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002547 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 if (mRawChannel == NULL) {
2549 LOGE("allocation of raw channel failed");
2550 pthread_mutex_unlock(&mMutex);
2551 return -ENOMEM;
2552 }
2553 newStream->max_buffers = mRawChannel->getNumBuffers();
2554 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2555 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002556 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002558 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
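                // Depth output arrives as a BLOB stream tagged with HAL_DATASPACE_DEPTH
                // and is handled by a dedicated QCamera3DepthChannel instead of the
                // regular picture channel.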
2559 mDepthChannel = new QCamera3DepthChannel(
2560 mCameraHandle->camera_handle, mChannelHandle,
2561 mCameraHandle->ops, NULL, NULL, &padding_info,
2562 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2563 mMetadataChannel);
2564 if (NULL == mDepthChannel) {
2565 LOGE("Allocation of depth channel failed");
2566 pthread_mutex_unlock(&mMutex);
2567 return NO_MEMORY;
2568 }
2569 newStream->priv = mDepthChannel;
2570 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2571 } else {
2572 // Max live snapshot inflight buffer is 1. This is to mitigate
2573 // frame drop issues for video snapshot. The more buffers being
2574 // allocated, the more frame drops there are.
2575 mPictureChannel = new QCamera3PicChannel(
2576 mCameraHandle->camera_handle, mChannelHandle,
2577 mCameraHandle->ops, captureResultCb,
2578 setBufferErrorStatus, &padding_info, this, newStream,
2579 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2580 m_bIs4KVideo, isZsl, mMetadataChannel,
2581 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2582 if (mPictureChannel == NULL) {
2583 LOGE("allocation of channel failed");
2584 pthread_mutex_unlock(&mMutex);
2585 return -ENOMEM;
2586 }
2587 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2588 newStream->max_buffers = mPictureChannel->getNumBuffers();
2589 mPictureChannel->overrideYuvSize(
2590 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2591 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002592 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 break;
2594
2595 default:
2596 LOGE("not a supported format 0x%x", newStream->format);
2597 break;
2598 }
2599 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2600 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2601 } else {
2602 LOGE("Error, Unknown stream type");
2603 pthread_mutex_unlock(&mMutex);
2604 return -EINVAL;
2605 }
2606
2607 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002608 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2609 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002610 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002611 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002612 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2613 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2614 }
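        // If the default format resolved to UBWC, update the gralloc usage so the
        // allocated buffers are UBWC-compressed to match the stream format.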
2615 }
2616
2617 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2618 it != mStreamInfo.end(); it++) {
2619 if ((*it)->stream == newStream) {
2620 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2621 break;
2622 }
2623 }
2624 } else {
2625 // Channel already exists for this stream
2626 // Do nothing for now
2627 }
2628 padding_info = gCamCapability[mCameraId]->padding_info;
2629
Emilian Peev7650c122017-01-19 08:24:33 -08002630 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002631 * since there is no real stream associated with it
2632 */
Emilian Peev7650c122017-01-19 08:24:33 -08002633 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002634 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2635 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002636 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002637 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 }
2639
Thierry Strudel2896d122017-02-23 19:18:03 -08002640 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2641 onlyRaw = false;
2642 }
2643
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002644 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002645 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002646 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002647 cam_analysis_info_t analysisInfo;
2648 int32_t ret = NO_ERROR;
2649 ret = mCommon.getAnalysisInfo(
2650 FALSE,
2651 analysisFeatureMask,
2652 &analysisInfo);
2653 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002654 cam_color_filter_arrangement_t analysis_color_arrangement =
2655 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2656 CAM_FILTER_ARRANGEMENT_Y :
2657 gCamCapability[mCameraId]->color_arrangement);
2658 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2659 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002660 cam_dimension_t analysisDim;
2661 analysisDim = mCommon.getMatchingDimension(previewSize,
2662 analysisInfo.analysis_recommended_res);
2663
2664 mAnalysisChannel = new QCamera3SupportChannel(
2665 mCameraHandle->camera_handle,
2666 mChannelHandle,
2667 mCameraHandle->ops,
2668 &analysisInfo.analysis_padding_info,
2669 analysisFeatureMask,
2670 CAM_STREAM_TYPE_ANALYSIS,
2671 &analysisDim,
2672 (analysisInfo.analysis_format
2673 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2674 : CAM_FORMAT_YUV_420_NV21),
2675 analysisInfo.hw_analysis_supported,
2676 gCamCapability[mCameraId]->color_arrangement,
2677 this,
2678 0); // force buffer count to 0
2679 } else {
2680 LOGW("getAnalysisInfo failed, ret = %d", ret);
2681 }
2682 if (!mAnalysisChannel) {
2683 LOGW("Analysis channel cannot be created");
2684 }
2685 }
2686
Thierry Strudel3d639192016-09-09 11:52:26 -07002687 //RAW DUMP channel
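// When raw dumping is enabled but the client did not request a RAW stream,
// a HAL-internal raw channel is created at the sensor's maximum raw size so
// frames can still be captured for debug dumps.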
2688 if (mEnableRawDump && isRawStreamRequested == false){
2689 cam_dimension_t rawDumpSize;
2690 rawDumpSize = getMaxRawSize(mCameraId);
2691 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2692 setPAAFSupport(rawDumpFeatureMask,
2693 CAM_STREAM_TYPE_RAW,
2694 gCamCapability[mCameraId]->color_arrangement);
2695 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2696 mChannelHandle,
2697 mCameraHandle->ops,
2698 rawDumpSize,
2699 &padding_info,
2700 this, rawDumpFeatureMask);
2701 if (!mRawDumpChannel) {
2702 LOGE("Raw Dump channel cannot be created");
2703 pthread_mutex_unlock(&mMutex);
2704 return -ENOMEM;
2705 }
2706 }
2707
Thierry Strudel3d639192016-09-09 11:52:26 -07002708 if (mAnalysisChannel) {
2709 cam_analysis_info_t analysisInfo;
2710 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2711 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2712 CAM_STREAM_TYPE_ANALYSIS;
2713 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2714 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002715 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002716 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2717 &analysisInfo);
2718 if (rc != NO_ERROR) {
2719 LOGE("getAnalysisInfo failed, ret = %d", rc);
2720 pthread_mutex_unlock(&mMutex);
2721 return rc;
2722 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002723 cam_color_filter_arrangement_t analysis_color_arrangement =
2724 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2725 CAM_FILTER_ARRANGEMENT_Y :
2726 gCamCapability[mCameraId]->color_arrangement);
2727 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2728 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2729 analysis_color_arrangement);
2730
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002732 mCommon.getMatchingDimension(previewSize,
2733 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 mStreamConfigInfo.num_streams++;
2735 }
2736
Thierry Strudel2896d122017-02-23 19:18:03 -08002737 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 cam_analysis_info_t supportInfo;
2739 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2740 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2741 setPAAFSupport(callbackFeatureMask,
2742 CAM_STREAM_TYPE_CALLBACK,
2743 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002744 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002745 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002746 if (ret != NO_ERROR) {
2747 /* Ignore the error for Mono camera
2748 * because the PAAF bit mask is only set
2749 * for CAM_STREAM_TYPE_ANALYSIS stream type
2750 */
2751 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2752 LOGW("getAnalysisInfo failed, ret = %d", ret);
2753 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002754 }
2755 mSupportChannel = new QCamera3SupportChannel(
2756 mCameraHandle->camera_handle,
2757 mChannelHandle,
2758 mCameraHandle->ops,
2759 &gCamCapability[mCameraId]->padding_info,
2760 callbackFeatureMask,
2761 CAM_STREAM_TYPE_CALLBACK,
2762 &QCamera3SupportChannel::kDim,
2763 CAM_FORMAT_YUV_420_NV21,
2764 supportInfo.hw_analysis_supported,
2765 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002766 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 if (!mSupportChannel) {
2768 LOGE("dummy channel cannot be created");
2769 pthread_mutex_unlock(&mMutex);
2770 return -ENOMEM;
2771 }
2772 }
2773
2774 if (mSupportChannel) {
2775 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2776 QCamera3SupportChannel::kDim;
2777 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2778 CAM_STREAM_TYPE_CALLBACK;
2779 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2780 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2781 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2782 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2783 gCamCapability[mCameraId]->color_arrangement);
2784 mStreamConfigInfo.num_streams++;
2785 }
2786
2787 if (mRawDumpChannel) {
2788 cam_dimension_t rawSize;
2789 rawSize = getMaxRawSize(mCameraId);
2790 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2791 rawSize;
2792 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2793 CAM_STREAM_TYPE_RAW;
2794 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2795 CAM_QCOM_FEATURE_NONE;
2796 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2797 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2798 gCamCapability[mCameraId]->color_arrangement);
2799 mStreamConfigInfo.num_streams++;
2800 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002801
2802 if (mHdrPlusRawSrcChannel) {
2803 cam_dimension_t rawSize;
2804 rawSize = getMaxRawSize(mCameraId);
2805 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2806 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2807 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2808 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2809 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2810 gCamCapability[mCameraId]->color_arrangement);
2811 mStreamConfigInfo.num_streams++;
2812 }
2813
Thierry Strudel3d639192016-09-09 11:52:26 -07002814 /* In HFR mode, if video stream is not added, create a dummy channel so that
2815 * ISP can create a batch mode even for preview only case. This channel is
2816 * never 'start'ed (no stream-on), it is only 'initialized' */
2817 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2818 !m_bIsVideo) {
2819 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2820 setPAAFSupport(dummyFeatureMask,
2821 CAM_STREAM_TYPE_VIDEO,
2822 gCamCapability[mCameraId]->color_arrangement);
2823 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2824 mChannelHandle,
2825 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002826 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002827 this,
2828 &mDummyBatchStream,
2829 CAM_STREAM_TYPE_VIDEO,
2830 dummyFeatureMask,
2831 mMetadataChannel);
2832 if (NULL == mDummyBatchChannel) {
2833 LOGE("creation of mDummyBatchChannel failed."
2834 "Preview will use non-hfr sensor mode ");
2835 }
2836 }
2837 if (mDummyBatchChannel) {
2838 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2839 mDummyBatchStream.width;
2840 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2841 mDummyBatchStream.height;
2842 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2843 CAM_STREAM_TYPE_VIDEO;
2844 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2845 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2846 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2847 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2848 gCamCapability[mCameraId]->color_arrangement);
2849 mStreamConfigInfo.num_streams++;
2850 }
2851
2852 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2853 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002854 m_bIs4KVideo ? 0 :
2855 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002856
2857 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2858 for (pendingRequestIterator i = mPendingRequestsList.begin();
2859 i != mPendingRequestsList.end();) {
2860 i = erasePendingRequest(i);
2861 }
2862 mPendingFrameDropList.clear();
2863 // Initialize/Reset the pending buffers list
2864 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2865 req.mPendingBufferList.clear();
2866 }
2867 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2868
Thierry Strudel3d639192016-09-09 11:52:26 -07002869 mCurJpegMeta.clear();
2870 //Get min frame duration for this streams configuration
2871 deriveMinFrameDuration();
2872
Chien-Yu Chenee335912017-02-09 17:53:20 -08002873 mFirstPreviewIntentSeen = false;
2874
2875 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002876 {
2877 Mutex::Autolock l(gHdrPlusClientLock);
2878 disableHdrPlusModeLocked();
2879 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002880
Thierry Strudel3d639192016-09-09 11:52:26 -07002881 // Update state
2882 mState = CONFIGURED;
2883
2884 pthread_mutex_unlock(&mMutex);
2885
2886 return rc;
2887}
2888
2889/*===========================================================================
2890 * FUNCTION : validateCaptureRequest
2891 *
2892 * DESCRIPTION: validate a capture request from camera service
2893 *
2894 * PARAMETERS :
2895 * @request : request from framework to process
2896 *
2897 * RETURN :
2898 *
2899 *==========================================================================*/
2900int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002901 camera3_capture_request_t *request,
2902 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002903{
2904 ssize_t idx = 0;
2905 const camera3_stream_buffer_t *b;
2906 CameraMetadata meta;
2907
2908 /* Sanity check the request */
2909 if (request == NULL) {
2910 LOGE("NULL capture request");
2911 return BAD_VALUE;
2912 }
2913
2914 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2915 /*settings cannot be null for the first request*/
2916 return BAD_VALUE;
2917 }
2918
2919 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002920 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2921 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002922 LOGE("Request %d: No output buffers provided!",
2923 frameNumber);
2924 return BAD_VALUE;
2925 }
2926 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2927 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2928 request->num_output_buffers, MAX_NUM_STREAMS);
2929 return BAD_VALUE;
2930 }
2931 if (request->input_buffer != NULL) {
2932 b = request->input_buffer;
2933 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2934 LOGE("Request %d: Buffer %ld: Status not OK!",
2935 frameNumber, (long)idx);
2936 return BAD_VALUE;
2937 }
2938 if (b->release_fence != -1) {
2939 LOGE("Request %d: Buffer %ld: Has a release fence!",
2940 frameNumber, (long)idx);
2941 return BAD_VALUE;
2942 }
2943 if (b->buffer == NULL) {
2944 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2945 frameNumber, (long)idx);
2946 return BAD_VALUE;
2947 }
2948 }
2949
2950 // Validate all buffers
2951 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002952 if (b == NULL) {
2953 return BAD_VALUE;
2954 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002955 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002956 QCamera3ProcessingChannel *channel =
2957 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2958 if (channel == NULL) {
2959 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2960 frameNumber, (long)idx);
2961 return BAD_VALUE;
2962 }
2963 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2964 LOGE("Request %d: Buffer %ld: Status not OK!",
2965 frameNumber, (long)idx);
2966 return BAD_VALUE;
2967 }
2968 if (b->release_fence != -1) {
2969 LOGE("Request %d: Buffer %ld: Has a release fence!",
2970 frameNumber, (long)idx);
2971 return BAD_VALUE;
2972 }
2973 if (b->buffer == NULL) {
2974 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2975 frameNumber, (long)idx);
2976 return BAD_VALUE;
2977 }
2978 if (*(b->buffer) == NULL) {
2979 LOGE("Request %d: Buffer %ld: NULL private handle!",
2980 frameNumber, (long)idx);
2981 return BAD_VALUE;
2982 }
2983 idx++;
2984 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002985 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002986 return NO_ERROR;
2987}
2988
2989/*===========================================================================
2990 * FUNCTION : deriveMinFrameDuration
2991 *
2992 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2993 * on currently configured streams.
2994 *
2995 * PARAMETERS : NONE
2996 *
2997 * RETURN : NONE
2998 *
2999 *==========================================================================*/
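// Illustrative example (hypothetical stream set): with a 1920x1080 preview
// stream and a 4032x3024 BLOB stream, the JPEG area becomes the maximum
// processed dimension, and the raw duration is taken from the smallest
// advertised raw size whose area covers that JPEG area.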
3000void QCamera3HardwareInterface::deriveMinFrameDuration()
3001{
3002 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3003
3004 maxJpegDim = 0;
3005 maxProcessedDim = 0;
3006 maxRawDim = 0;
3007
3008 // Figure out maximum jpeg, processed, and raw dimensions
3009 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3010 it != mStreamInfo.end(); it++) {
3011
3012 // Skip input streams; they don't contribute to output frame durations
3013 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3014 continue;
3015
3016 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3017 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3018 if (dimension > maxJpegDim)
3019 maxJpegDim = dimension;
3020 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3021 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3022 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3023 if (dimension > maxRawDim)
3024 maxRawDim = dimension;
3025 } else {
3026 if (dimension > maxProcessedDim)
3027 maxProcessedDim = dimension;
3028 }
3029 }
3030
3031 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3032 MAX_SIZES_CNT);
3033
3034 //Assume all jpeg dimensions are in processed dimensions.
3035 if (maxJpegDim > maxProcessedDim)
3036 maxProcessedDim = maxJpegDim;
3037 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3038 if (maxProcessedDim > maxRawDim) {
3039 maxRawDim = INT32_MAX;
3040
3041 for (size_t i = 0; i < count; i++) {
3042 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3043 gCamCapability[mCameraId]->raw_dim[i].height;
3044 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3045 maxRawDim = dimension;
3046 }
3047 }
3048
3049 //Find minimum durations for processed, jpeg, and raw
3050 for (size_t i = 0; i < count; i++) {
3051 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3052 gCamCapability[mCameraId]->raw_dim[i].height) {
3053 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3054 break;
3055 }
3056 }
3057 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3058 for (size_t i = 0; i < count; i++) {
3059 if (maxProcessedDim ==
3060 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3061 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3062 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3063 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3064 break;
3065 }
3066 }
3067}
3068
3069/*===========================================================================
3070 * FUNCTION : getMinFrameDuration
3071 *
3072 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
3073 * and current request configuration.
3074 *
3075 * PARAMETERS : @request: request sent by the frameworks
3076 *
3077 * RETURN : min frame duration for a particular request
3078 *
3079 *==========================================================================*/
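// For example, a request carrying only processed (non-BLOB, non-RAW) buffers
// returns MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB
// (JPEG) buffer is present, mMinJpegFrameDuration is folded in as well.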
3080int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3081{
3082 bool hasJpegStream = false;
3083 bool hasRawStream = false;
3084 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3085 const camera3_stream_t *stream = request->output_buffers[i].stream;
3086 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3087 hasJpegStream = true;
3088 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3089 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3090 stream->format == HAL_PIXEL_FORMAT_RAW16)
3091 hasRawStream = true;
3092 }
3093
3094 if (!hasJpegStream)
3095 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3096 else
3097 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3098}
3099
3100/*===========================================================================
3101 * FUNCTION : handleBuffersDuringFlushLock
3102 *
3103 * DESCRIPTION: Account for buffers returned from back-end during flush
3104 * This function is executed while mMutex is held by the caller.
3105 *
3106 * PARAMETERS :
3107 * @buffer: image buffer for the callback
3108 *
3109 * RETURN :
3110 *==========================================================================*/
3111void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3112{
3113 bool buffer_found = false;
3114 for (List<PendingBuffersInRequest>::iterator req =
3115 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3116 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3117 for (List<PendingBufferInfo>::iterator i =
3118 req->mPendingBufferList.begin();
3119 i != req->mPendingBufferList.end(); i++) {
3120 if (i->buffer == buffer->buffer) {
3121 mPendingBuffersMap.numPendingBufsAtFlush--;
3122 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3123 buffer->buffer, req->frame_number,
3124 mPendingBuffersMap.numPendingBufsAtFlush);
3125 buffer_found = true;
3126 break;
3127 }
3128 }
3129 if (buffer_found) {
3130 break;
3131 }
3132 }
3133 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3134 //signal the flush()
3135 LOGD("All buffers returned to HAL. Continue flush");
3136 pthread_cond_signal(&mBuffersCond);
3137 }
3138}
3139
Thierry Strudel3d639192016-09-09 11:52:26 -07003140/*===========================================================================
3141 * FUNCTION : handleBatchMetadata
3142 *
3143 * DESCRIPTION: Handles metadata buffer callback in batch mode
3144 *
3145 * PARAMETERS : @metadata_buf: metadata buffer
3146 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3147 * the meta buf in this method
3148 *
3149 * RETURN :
3150 *
3151 *==========================================================================*/
3152void QCamera3HardwareInterface::handleBatchMetadata(
3153 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3154{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003155 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003156
3157 if (NULL == metadata_buf) {
3158 LOGE("metadata_buf is NULL");
3159 return;
3160 }
3161 /* In batch mode, the metadata will contain the frame number and timestamp of
3162 * the last frame in the batch. Eg: a batch containing buffers from request
3163 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3164 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3165 * multiple process_capture_results */
3166 metadata_buffer_t *metadata =
3167 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3168 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3169 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3170 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3171 uint32_t frame_number = 0, urgent_frame_number = 0;
3172 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3173 bool invalid_metadata = false;
3174 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3175 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003176 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003177
3178 int32_t *p_frame_number_valid =
3179 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3180 uint32_t *p_frame_number =
3181 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3182 int64_t *p_capture_time =
3183 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3184 int32_t *p_urgent_frame_number_valid =
3185 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3186 uint32_t *p_urgent_frame_number =
3187 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3188
3189 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3190 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3191 (NULL == p_urgent_frame_number)) {
3192 LOGE("Invalid metadata");
3193 invalid_metadata = true;
3194 } else {
3195 frame_number_valid = *p_frame_number_valid;
3196 last_frame_number = *p_frame_number;
3197 last_frame_capture_time = *p_capture_time;
3198 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3199 last_urgent_frame_number = *p_urgent_frame_number;
3200 }
3201
3202 /* In batchmode, when no video buffers are requested, set_parms are sent
3203 * for every capture_request. The difference between consecutive urgent
3204 * frame numbers and frame numbers should be used to interpolate the
3205 * corresponding frame numbers and time stamps */
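/* E.g. if first_frame_number is 5 and last_frame_number is 8, frameNumDiff
 * works out to 4 and the loop below emits four interpolated results, one for
 * each of the frame numbers 5 through 8. */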
3206 pthread_mutex_lock(&mMutex);
3207 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003208 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3209 if(idx < 0) {
3210 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3211 last_urgent_frame_number);
3212 mState = ERROR;
3213 pthread_mutex_unlock(&mMutex);
3214 return;
3215 }
3216 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003217 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3218 first_urgent_frame_number;
3219
3220 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3221 urgent_frame_number_valid,
3222 first_urgent_frame_number, last_urgent_frame_number);
3223 }
3224
3225 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003226 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3227 if(idx < 0) {
3228 LOGE("Invalid frame number received: %d. Irrecoverable error",
3229 last_frame_number);
3230 mState = ERROR;
3231 pthread_mutex_unlock(&mMutex);
3232 return;
3233 }
3234 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003235 frameNumDiff = last_frame_number + 1 -
3236 first_frame_number;
3237 mPendingBatchMap.removeItem(last_frame_number);
3238
3239 LOGD("frm: valid: %d frm_num: %d - %d",
3240 frame_number_valid,
3241 first_frame_number, last_frame_number);
3242
3243 }
3244 pthread_mutex_unlock(&mMutex);
3245
3246 if (urgent_frame_number_valid || frame_number_valid) {
3247 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3248 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3249 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3250 urgentFrameNumDiff, last_urgent_frame_number);
3251 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3252 LOGE("frameNumDiff: %d frameNum: %d",
3253 frameNumDiff, last_frame_number);
3254 }
3255
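/* loopCount is the larger of the two diffs so that both the urgent and the
 * final metadata of every request in the batch are emitted, even when the
 * urgent and regular frame numbers advance by different amounts. */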
3256 for (size_t i = 0; i < loopCount; i++) {
3257 /* handleMetadataWithLock is called even for invalid_metadata for
3258 * pipeline depth calculation */
3259 if (!invalid_metadata) {
3260 /* Infer frame number. Batch metadata contains frame number of the
3261 * last frame */
3262 if (urgent_frame_number_valid) {
3263 if (i < urgentFrameNumDiff) {
3264 urgent_frame_number =
3265 first_urgent_frame_number + i;
3266 LOGD("inferred urgent frame_number: %d",
3267 urgent_frame_number);
3268 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3269 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3270 } else {
3271 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3272 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3273 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3274 }
3275 }
3276
3277 /* Infer frame number. Batch metadata contains frame number of the
3278 * last frame */
3279 if (frame_number_valid) {
3280 if (i < frameNumDiff) {
3281 frame_number = first_frame_number + i;
3282 LOGD("inferred frame_number: %d", frame_number);
3283 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3284 CAM_INTF_META_FRAME_NUMBER, frame_number);
3285 } else {
3286 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3287 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3288 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3289 }
3290 }
3291
3292 if (last_frame_capture_time) {
3293 //Infer timestamp
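// e.g. (illustrative numbers): for a 4-frame batch at mHFRVideoFps = 120,
// the first frame is back-dated by 3 * (NSEC_PER_SEC / 120) from the last
// capture time, and frame i is offset forward from that first timestamp by
// i * (NSEC_PER_SEC / 120).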
3294 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003295 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003296 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003297 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003298 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3299 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3300 LOGD("batch capture_time: %lld, capture_time: %lld",
3301 last_frame_capture_time, capture_time);
3302 }
3303 }
3304 pthread_mutex_lock(&mMutex);
3305 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003306 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003307 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3308 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003309 &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003310 pthread_mutex_unlock(&mMutex);
3311 }
3312
3313 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003314 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003315 mMetadataChannel->bufDone(metadata_buf);
3316 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003317 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003318 }
3319}
3320
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003321void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3322 camera3_error_msg_code_t errorCode)
3323{
3324 camera3_notify_msg_t notify_msg;
3325 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3326 notify_msg.type = CAMERA3_MSG_ERROR;
3327 notify_msg.message.error.error_code = errorCode;
3328 notify_msg.message.error.error_stream = NULL;
3329 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003330 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003331
3332 return;
3333}
Thierry Strudel3d639192016-09-09 11:52:26 -07003334/*===========================================================================
3335 * FUNCTION : handleMetadataWithLock
3336 *
3337 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3338 *
3339 * PARAMETERS : @metadata_buf: metadata buffer
3340 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3341 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003342 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3343 * last urgent metadata in a batch. Always true for non-batch mode
3344 * @lastMetadataInBatch: Boolean to indicate whether this is the
3345 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003346 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3347 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003348 *
3349 * RETURN :
3350 *
3351 *==========================================================================*/
3352void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003353 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003354 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3355 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003356{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003357 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003358 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3359 //during flush do not send metadata from this thread
3360 LOGD("not sending metadata during flush or when mState is error");
3361 if (free_and_bufdone_meta_buf) {
3362 mMetadataChannel->bufDone(metadata_buf);
3363 free(metadata_buf);
3364 }
3365 return;
3366 }
3367
3368 //not in flush
3369 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3370 int32_t frame_number_valid, urgent_frame_number_valid;
3371 uint32_t frame_number, urgent_frame_number;
3372 int64_t capture_time;
3373 nsecs_t currentSysTime;
3374
3375 int32_t *p_frame_number_valid =
3376 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3377 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3378 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3379 int32_t *p_urgent_frame_number_valid =
3380 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3381 uint32_t *p_urgent_frame_number =
3382 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3383 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3384 metadata) {
3385 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3386 *p_frame_number_valid, *p_frame_number);
3387 }
3388
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003389 camera_metadata_t *resultMetadata = nullptr;
3390
Thierry Strudel3d639192016-09-09 11:52:26 -07003391 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3392 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3393 LOGE("Invalid metadata");
3394 if (free_and_bufdone_meta_buf) {
3395 mMetadataChannel->bufDone(metadata_buf);
3396 free(metadata_buf);
3397 }
3398 goto done_metadata;
3399 }
3400 frame_number_valid = *p_frame_number_valid;
3401 frame_number = *p_frame_number;
3402 capture_time = *p_capture_time;
3403 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3404 urgent_frame_number = *p_urgent_frame_number;
3405 currentSysTime = systemTime(CLOCK_MONOTONIC);
3406
3407 // Detect if buffers from any requests are overdue
3408 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003409 int64_t timeout;
3410 {
3411 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3412 // If there is a pending HDR+ request, the following requests may be blocked until the
3413 // HDR+ request is done. So allow a longer timeout.
3414 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3415 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3416 }
3417
3418 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003419 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003420 assert(missed.stream->priv);
3421 if (missed.stream->priv) {
3422 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3423 assert(ch->mStreams[0]);
3424 if (ch->mStreams[0]) {
3425 LOGE("Cancel missing frame = %d, buffer = %p,"
3426 "stream type = %d, stream format = %d",
3427 req.frame_number, missed.buffer,
3428 ch->mStreams[0]->getMyType(), missed.stream->format);
3429 ch->timeoutFrame(req.frame_number);
3430 }
3431 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003432 }
3433 }
3434 }
3435 //Partial result on process_capture_result for timestamp
3436 if (urgent_frame_number_valid) {
3437 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3438 urgent_frame_number, capture_time);
3439
3440 //Received an urgent frame number, handle it
3441 //using partial results
3442 for (pendingRequestIterator i =
3443 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3444 LOGD("Iterator Frame = %d urgent frame = %d",
3445 i->frame_number, urgent_frame_number);
3446
3447 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3448 (i->partial_result_cnt == 0)) {
3449 LOGE("Error: HAL missed urgent metadata for frame number %d",
3450 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003451 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003452 }
3453
3454 if (i->frame_number == urgent_frame_number &&
3455 i->bUrgentReceived == 0) {
3456
3457 camera3_capture_result_t result;
3458 memset(&result, 0, sizeof(camera3_capture_result_t));
3459
3460 i->partial_result_cnt++;
3461 i->bUrgentReceived = 1;
3462 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003463 result.result = translateCbUrgentMetadataToResultMetadata(
3464 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003465 // Populate metadata result
3466 result.frame_number = urgent_frame_number;
3467 result.num_output_buffers = 0;
3468 result.output_buffers = NULL;
3469 result.partial_result = i->partial_result_cnt;
3470
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003471 {
3472 Mutex::Autolock l(gHdrPlusClientLock);
3473 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3474 // Notify HDR+ client about the partial metadata.
3475 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3476 result.partial_result == PARTIAL_RESULT_COUNT);
3477 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003478 }
3479
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003480 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003481 LOGD("urgent frame_number = %u, capture_time = %lld",
3482 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003483 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3484 // Instant AEC settled for this frame.
3485 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3486 mInstantAECSettledFrameNumber = urgent_frame_number;
3487 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003488 free_camera_metadata((camera_metadata_t *)result.result);
3489 break;
3490 }
3491 }
3492 }
3493
3494 if (!frame_number_valid) {
3495 LOGD("Not a valid normal frame number, used as SOF only");
3496 if (free_and_bufdone_meta_buf) {
3497 mMetadataChannel->bufDone(metadata_buf);
3498 free(metadata_buf);
3499 }
3500 goto done_metadata;
3501 }
3502 LOGH("valid frame_number = %u, capture_time = %lld",
3503 frame_number, capture_time);
3504
Emilian Peev7650c122017-01-19 08:24:33 -08003505 if (metadata->is_depth_data_valid) {
3506 handleDepthDataLocked(metadata->depth_data, frame_number);
3507 }
3508
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003509 // Check whether any stream buffer corresponding to this is dropped or not
3510 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3511 // OR check if instant AEC is enabled, then frames need to be dropped until AEC is settled.
3512 for (auto & pendingRequest : mPendingRequestsList) {
3513 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3514 mInstantAECSettledFrameNumber)) {
3515 camera3_notify_msg_t notify_msg = {};
3516 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003517 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003518 QCamera3ProcessingChannel *channel =
3519 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003521 if (p_cam_frame_drop) {
3522 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003523 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003524 // Got the stream ID for drop frame.
3525 dropFrame = true;
3526 break;
3527 }
3528 }
3529 } else {
3530 // This is instant AEC case.
3531 // For instant AEC drop the stream untill AEC is settled.
3532 dropFrame = true;
3533 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003534
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003535 if (dropFrame) {
3536 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3537 if (p_cam_frame_drop) {
3538 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003539 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003540 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003541 } else {
3542 // For instant AEC, inform frame drop and frame number
3543 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3544 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003545 pendingRequest.frame_number, streamID,
3546 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003547 }
3548 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003549 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003550 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003551 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003552 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003553 if (p_cam_frame_drop) {
3554 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003555 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 } else {
3558 // For instant AEC, inform frame drop and frame number
3559 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3560 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003561 pendingRequest.frame_number, streamID,
3562 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003563 }
3564 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003565 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003566 PendingFrameDrop.stream_ID = streamID;
3567 // Add the Frame drop info to mPendingFrameDropList
3568 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003570 }
3571 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003573
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003574 for (auto & pendingRequest : mPendingRequestsList) {
3575 // Find the pending request with the frame number.
3576 if (pendingRequest.frame_number == frame_number) {
3577 // Update the sensor timestamp.
3578 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003579
Thierry Strudel3d639192016-09-09 11:52:26 -07003580
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003581 /* Set the timestamp in display metadata so that clients aware of
3582 private_handle, such as VT, can use these unmodified timestamps.
3583 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003584 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003585
Thierry Strudel3d639192016-09-09 11:52:26 -07003586 // Find channel requiring metadata, meaning internal offline postprocess
3587 // is needed.
3588 //TODO: for now, we don't support two streams requiring metadata at the same time.
3589 // (because we are not making copies, and the metadata buffer is not reference counted).
3590 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3592 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 if (iter->need_metadata) {
3594 internalPproc = true;
3595 QCamera3ProcessingChannel *channel =
3596 (QCamera3ProcessingChannel *)iter->stream->priv;
3597 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003598 if(p_is_metabuf_queued != NULL) {
3599 *p_is_metabuf_queued = true;
3600 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 break;
3602 }
3603 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003604 for (auto itr = pendingRequest.internalRequestList.begin();
3605 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003606 if (itr->need_metadata) {
3607 internalPproc = true;
3608 QCamera3ProcessingChannel *channel =
3609 (QCamera3ProcessingChannel *)itr->stream->priv;
3610 channel->queueReprocMetadata(metadata_buf);
3611 break;
3612 }
3613 }
3614
Thierry Strudel54dc9782017-02-15 12:12:10 -08003615 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003616
3617 bool *enableZsl = nullptr;
3618 if (gExposeEnableZslKey) {
3619 enableZsl = &pendingRequest.enableZsl;
3620 }
3621
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003622 resultMetadata = translateFromHalMetadata(metadata,
3623 pendingRequest.timestamp, pendingRequest.request_id,
3624 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3625 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003626 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003627 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003629 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003630 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003631 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003632
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003634
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003635 if (pendingRequest.blob_request) {
3636 //Dump tuning metadata if enabled and available
3637 char prop[PROPERTY_VALUE_MAX];
3638 memset(prop, 0, sizeof(prop));
3639 property_get("persist.camera.dumpmetadata", prop, "0");
3640 int32_t enabled = atoi(prop);
3641 if (enabled && metadata->is_tuning_params_valid) {
3642 dumpMetadataToFile(metadata->tuning_params,
3643 mMetaFrameCount,
3644 enabled,
3645 "Snapshot",
3646 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003647 }
3648 }
3649
3650 if (!internalPproc) {
3651 LOGD("couldn't find need_metadata for this metadata");
3652 // Return metadata buffer
3653 if (free_and_bufdone_meta_buf) {
3654 mMetadataChannel->bufDone(metadata_buf);
3655 free(metadata_buf);
3656 }
3657 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003658
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003659 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003660 }
3661 }
3662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 // Try to send out shutter callbacks and capture results.
3664 handlePendingResultsWithLock(frame_number, resultMetadata);
3665 return;
3666
Thierry Strudel3d639192016-09-09 11:52:26 -07003667done_metadata:
3668 for (pendingRequestIterator i = mPendingRequestsList.begin();
3669 i != mPendingRequestsList.end() ;i++) {
3670 i->pipeline_depth++;
3671 }
3672 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3673 unblockRequestIfNecessary();
3674}
3675
3676/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003677 * FUNCTION : handleDepthDataLocked
3678 *
3679 * DESCRIPTION: Handles incoming depth data
3680 *
3681 * PARAMETERS : @depthData : Depth data
3682 * @frameNumber: Frame number of the incoming depth data
3683 *
3684 * RETURN :
3685 *
3686 *==========================================================================*/
3687void QCamera3HardwareInterface::handleDepthDataLocked(
3688 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3689 uint32_t currentFrameNumber;
3690 buffer_handle_t *depthBuffer;
3691
3692 if (nullptr == mDepthChannel) {
3693 LOGE("Depth channel not present!");
3694 return;
3695 }
3696
3697 camera3_stream_buffer_t resultBuffer =
3698 {.acquire_fence = -1,
3699 .release_fence = -1,
3700 .status = CAMERA3_BUFFER_STATUS_OK,
3701 .buffer = nullptr,
3702 .stream = mDepthChannel->getStream()};
3703 camera3_capture_result_t result =
3704 {.result = nullptr,
3705 .num_output_buffers = 1,
3706 .output_buffers = &resultBuffer,
3707 .partial_result = 0,
3708 .frame_number = 0};
3709
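// Drain queued depth buffers in order: buffers older than the incoming frame
// are returned with CAMERA3_BUFFER_STATUS_ERROR (after an ERROR_BUFFER
// notify), the buffer matching 'frameNumber' is populated with 'depthData',
// and anything newer stays queued for a later callback.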
3710 do {
3711 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3712 if (nullptr == depthBuffer) {
3713 break;
3714 }
3715
3716 result.frame_number = currentFrameNumber;
3717 resultBuffer.buffer = depthBuffer;
3718 if (currentFrameNumber == frameNumber) {
3719 int32_t rc = mDepthChannel->populateDepthData(depthData,
3720 frameNumber);
3721 if (NO_ERROR != rc) {
3722 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3723 } else {
3724 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3725 }
3726 } else if (currentFrameNumber > frameNumber) {
3727 break;
3728 } else {
3729 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3730 {{currentFrameNumber, mDepthChannel->getStream(),
3731 CAMERA3_MSG_ERROR_BUFFER}}};
3732 orchestrateNotify(&notify_msg);
3733
3734 LOGE("Depth buffer for frame number: %d is missing "
3735 "returning back!", currentFrameNumber);
3736 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3737 }
3738 mDepthChannel->unmapBuffer(currentFrameNumber);
3739
3740 orchestrateResult(&result);
3741 } while (currentFrameNumber < frameNumber);
3742}
3743
3744/*===========================================================================
3745 * FUNCTION : notifyErrorFoPendingDepthData
3746 *
3747 * DESCRIPTION: Returns error for any pending depth buffers
3748 *
3749 * PARAMETERS : depthCh - depth channel that needs to get flushed
3750 *
3751 * RETURN :
3752 *
3753 *==========================================================================*/
3754void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3755 QCamera3DepthChannel *depthCh) {
3756 uint32_t currentFrameNumber;
3757 buffer_handle_t *depthBuffer;
3758
3759 if (nullptr == depthCh) {
3760 return;
3761 }
3762
3763 camera3_notify_msg_t notify_msg =
3764 {.type = CAMERA3_MSG_ERROR,
3765 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3766 camera3_stream_buffer_t resultBuffer =
3767 {.acquire_fence = -1,
3768 .release_fence = -1,
3769 .buffer = nullptr,
3770 .stream = depthCh->getStream(),
3771 .status = CAMERA3_BUFFER_STATUS_ERROR};
3772 camera3_capture_result_t result =
3773 {.result = nullptr,
3774 .frame_number = 0,
3775 .num_output_buffers = 1,
3776 .partial_result = 0,
3777 .output_buffers = &resultBuffer};
3778
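// Flush every queued depth buffer: each one is unmapped and returned with an
// ERROR_BUFFER notify followed by an error-status capture result.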
3779 while (nullptr !=
3780 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3781 depthCh->unmapBuffer(currentFrameNumber);
3782
3783 notify_msg.message.error.frame_number = currentFrameNumber;
3784 orchestrateNotify(&notify_msg);
3785
3786 resultBuffer.buffer = depthBuffer;
3787 result.frame_number = currentFrameNumber;
3788 orchestrateResult(&result);
3789 };
3790}
3791
3792/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003793 * FUNCTION : hdrPlusPerfLock
3794 *
3795 * DESCRIPTION: perf lock for HDR+ using custom intent
3796 *
3797 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3798 *
3799 * RETURN : None
3800 *
3801 *==========================================================================*/
3802void QCamera3HardwareInterface::hdrPlusPerfLock(
3803 mm_camera_super_buf_t *metadata_buf)
3804{
3805 if (NULL == metadata_buf) {
3806 LOGE("metadata_buf is NULL");
3807 return;
3808 }
3809 metadata_buffer_t *metadata =
3810 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3811 int32_t *p_frame_number_valid =
3812 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3813 uint32_t *p_frame_number =
3814 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3815
3816 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3817 LOGE("%s: Invalid metadata", __func__);
3818 return;
3819 }
3820
3821 //acquire perf lock for 5 sec after the last HDR frame is captured
3822 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3823 if ((p_frame_number != NULL) &&
3824 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003825 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003826 }
3827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003828}
3829
3830/*===========================================================================
3831 * FUNCTION : handleInputBufferWithLock
3832 *
3833 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3834 *
3835 * PARAMETERS : @frame_number: frame number of the input buffer
3836 *
3837 * RETURN :
3838 *
3839 *==========================================================================*/
3840void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3841{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003842 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003843 pendingRequestIterator i = mPendingRequestsList.begin();
3844 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3845 i++;
3846 }
3847 if (i != mPendingRequestsList.end() && i->input_buffer) {
3848 //found the right request
3849 if (!i->shutter_notified) {
3850 CameraMetadata settings;
3851 camera3_notify_msg_t notify_msg;
3852 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3853 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3854 if(i->settings) {
3855 settings = i->settings;
3856 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3857 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3858 } else {
3859 LOGE("No timestamp in input settings! Using current one.");
3860 }
3861 } else {
3862 LOGE("Input settings missing!");
3863 }
3864
3865 notify_msg.type = CAMERA3_MSG_SHUTTER;
3866 notify_msg.message.shutter.frame_number = frame_number;
3867 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003869 i->shutter_notified = true;
3870 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3871 i->frame_number, notify_msg.message.shutter.timestamp);
3872 }
3873
3874 if (i->input_buffer->release_fence != -1) {
3875 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3876 close(i->input_buffer->release_fence);
3877 if (rc != OK) {
3878 LOGE("input buffer sync wait failed %d", rc);
3879 }
3880 }
3881
3882 camera3_capture_result result;
3883 memset(&result, 0, sizeof(camera3_capture_result));
3884 result.frame_number = frame_number;
3885 result.result = i->settings;
3886 result.input_buffer = i->input_buffer;
3887 result.partial_result = PARTIAL_RESULT_COUNT;
3888
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003889 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003890 LOGD("Input request metadata and input buffer frame_number = %u",
3891 i->frame_number);
3892 i = erasePendingRequest(i);
3893 } else {
3894 LOGE("Could not find input request for frame number %d", frame_number);
3895 }
3896}
3897
3898/*===========================================================================
3899 * FUNCTION : handleBufferWithLock
3900 *
3901 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3902 *
3903 * PARAMETERS : @buffer: image buffer for the callback
3904 * @frame_number: frame number of the image buffer
3905 *
3906 * RETURN :
3907 *
3908 *==========================================================================*/
3909void QCamera3HardwareInterface::handleBufferWithLock(
3910 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3911{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003912 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003913
3914 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3915 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3916 }
3917
Thierry Strudel3d639192016-09-09 11:52:26 -07003918 /* Nothing to be done during error state */
3919 if ((ERROR == mState) || (DEINIT == mState)) {
3920 return;
3921 }
3922 if (mFlushPerf) {
3923 handleBuffersDuringFlushLock(buffer);
3924 return;
3925 }
3926 //not in flush
3927 // If the frame number doesn't exist in the pending request list,
3928 // directly send the buffer to the frameworks, and update pending buffers map
3929 // Otherwise, book-keep the buffer.
3930 pendingRequestIterator i = mPendingRequestsList.begin();
3931 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3932 i++;
3933 }
3934 if (i == mPendingRequestsList.end()) {
3935 // Verify all pending requests frame_numbers are greater
3936 for (pendingRequestIterator j = mPendingRequestsList.begin();
3937 j != mPendingRequestsList.end(); j++) {
3938 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3939 LOGW("Error: pending live frame number %d is smaller than %d",
3940 j->frame_number, frame_number);
3941 }
3942 }
3943 camera3_capture_result_t result;
3944 memset(&result, 0, sizeof(camera3_capture_result_t));
3945 result.result = NULL;
3946 result.frame_number = frame_number;
3947 result.num_output_buffers = 1;
3948 result.partial_result = 0;
3949 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3950 m != mPendingFrameDropList.end(); m++) {
3951 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3952 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3953 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3954 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3955 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3956 frame_number, streamID);
3957 m = mPendingFrameDropList.erase(m);
3958 break;
3959 }
3960 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003961 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003962 result.output_buffers = buffer;
3963 LOGH("result frame_number = %d, buffer = %p",
3964 frame_number, buffer->buffer);
3965
3966 mPendingBuffersMap.removeBuf(buffer->buffer);
3967
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003968 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003969 } else {
3970 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003971 if (i->input_buffer->release_fence != -1) {
3972 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3973 close(i->input_buffer->release_fence);
3974 if (rc != OK) {
3975 LOGE("input buffer sync wait failed %d", rc);
3976 }
3977 }
3978        }
3979
3980        // Put buffer into the pending request
3981 for (auto &requestedBuffer : i->buffers) {
3982 if (requestedBuffer.stream == buffer->stream) {
3983 if (requestedBuffer.buffer != nullptr) {
3984 LOGE("Error: buffer is already set");
3985 } else {
3986 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3987 sizeof(camera3_stream_buffer_t));
3988 *(requestedBuffer.buffer) = *buffer;
3989 LOGH("cache buffer %p at result frame_number %u",
3990 buffer->buffer, frame_number);
3991                }
3992 }
3993 }
3994
3995 if (i->input_buffer) {
3996 // For a reprocessing request, try to send out shutter callback and result metadata.
3997 handlePendingResultsWithLock(frame_number, nullptr);
3998 }
3999    }
4000
4001 if (mPreviewStarted == false) {
4002 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4003 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004004 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4005
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004006 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4007 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4008 mPreviewStarted = true;
4009
4010 // Set power hint for preview
4011 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4012 }
4013 }
4014}
4015
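/*===========================================================================
 * FUNCTION : handlePendingResultsWithLock
 *
 * DESCRIPTION: Updates the pending request matching frameNumber with the given
 *              result metadata, then walks the pending request list in frame
 *              number order and sends out shutter callbacks and capture results
 *              for every request that is ready. For a live request, earlier
 *              live requests that still have no result metadata are completed
 *              with an ERROR_RESULT notification. Expected to be called with
 *              mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number of the result metadata
 *              @resultMetadata : result metadata for the frame; may be nullptr
 *              for a reprocessing request, whose settings are reused as result
 *
 * RETURN :
 *
 *==========================================================================*/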
4016void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4017 const camera_metadata_t *resultMetadata)
4018{
4019 // Find the pending request for this result metadata.
4020 auto requestIter = mPendingRequestsList.begin();
4021 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4022 requestIter++;
4023 }
4024
4025 if (requestIter == mPendingRequestsList.end()) {
4026 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4027 return;
4028 }
4029
4030 // Update the result metadata
4031 requestIter->resultMetadata = resultMetadata;
4032
4033 // Check what type of request this is.
4034 bool liveRequest = false;
4035 if (requestIter->hdrplus) {
4036 // HDR+ request doesn't have partial results.
4037 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4038 } else if (requestIter->input_buffer != nullptr) {
4039 // Reprocessing request result is the same as settings.
4040 requestIter->resultMetadata = requestIter->settings;
4041 // Reprocessing request doesn't have partial results.
4042 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4043 } else {
4044 liveRequest = true;
4045 requestIter->partial_result_cnt++;
4046 mPendingLiveRequest--;
4047
4048        {
4049 Mutex::Autolock l(gHdrPlusClientLock);
4050 // For a live request, send the metadata to HDR+ client.
4051 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4052 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4053 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4054 }
4055        }
4056 }
4057
4058 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4059 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4060 bool readyToSend = true;
4061
4062 // Iterate through the pending requests to send out shutter callbacks and results that are
4063 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4064 // live requests that don't have result metadata yet.
4065 auto iter = mPendingRequestsList.begin();
4066 while (iter != mPendingRequestsList.end()) {
4067 // Check if current pending request is ready. If it's not ready, the following pending
4068 // requests are also not ready.
4069 if (readyToSend && iter->resultMetadata == nullptr) {
4070 readyToSend = false;
4071 }
4072
4073 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4074
4075 std::vector<camera3_stream_buffer_t> outputBuffers;
4076
4077 camera3_capture_result_t result = {};
4078 result.frame_number = iter->frame_number;
4079 result.result = iter->resultMetadata;
4080 result.partial_result = iter->partial_result_cnt;
4081
4082 // If this pending buffer has result metadata, we may be able to send out shutter callback
4083 // and result metadata.
4084 if (iter->resultMetadata != nullptr) {
4085 if (!readyToSend) {
4086 // If any of the previous pending request is not ready, this pending request is
4087 // also not ready to send in order to keep shutter callbacks and result metadata
4088 // in order.
4089 iter++;
4090 continue;
4091 }
4092
4093 // Invoke shutter callback if not yet.
4094 if (!iter->shutter_notified) {
4095 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4096
4097 // Find the timestamp in HDR+ result metadata
4098 camera_metadata_ro_entry_t entry;
4099 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4100 ANDROID_SENSOR_TIMESTAMP, &entry);
4101 if (res != OK) {
4102 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4103 __FUNCTION__, iter->frame_number, strerror(-res), res);
4104 } else {
4105 timestamp = entry.data.i64[0];
4106 }
4107
4108 camera3_notify_msg_t notify_msg = {};
4109 notify_msg.type = CAMERA3_MSG_SHUTTER;
4110 notify_msg.message.shutter.frame_number = iter->frame_number;
4111 notify_msg.message.shutter.timestamp = timestamp;
4112 orchestrateNotify(&notify_msg);
4113 iter->shutter_notified = true;
4114 }
4115
4116 result.input_buffer = iter->input_buffer;
4117
4118        } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4119 // If the result metadata belongs to a live request, notify errors for previous pending
4120 // live requests.
4121 mPendingLiveRequest--;
4122
4123 CameraMetadata dummyMetadata;
4124 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4125 result.result = dummyMetadata.release();
4126
4127 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
4128
4129 // partial_result should be PARTIAL_RESULT_CNT in case of
4130 // ERROR_RESULT.
4131 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4132 result.partial_result = PARTIAL_RESULT_COUNT;
4133
4134        } else {
4135 iter++;
4136 continue;
4137 }
4138
4139        // Prepare output buffer array
4140 for (auto bufferInfoIter = iter->buffers.begin();
4141 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4142 if (bufferInfoIter->buffer != nullptr) {
4143
4144 QCamera3Channel *channel =
4145 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4146 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4147
4148 // Check if this buffer is a dropped frame.
4149 auto frameDropIter = mPendingFrameDropList.begin();
4150 while (frameDropIter != mPendingFrameDropList.end()) {
4151 if((frameDropIter->stream_ID == streamID) &&
4152 (frameDropIter->frame_number == frameNumber)) {
4153 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4154 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4155 streamID);
4156 mPendingFrameDropList.erase(frameDropIter);
4157 break;
4158 } else {
4159 frameDropIter++;
4160 }
4161 }
4162
4163 // Check buffer error status
4164 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4165 bufferInfoIter->buffer->buffer);
4166 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4167
4168 outputBuffers.push_back(*(bufferInfoIter->buffer));
4169 free(bufferInfoIter->buffer);
4170 bufferInfoIter->buffer = NULL;
4171 }
4172 }
4173
4174 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4175 result.num_output_buffers = outputBuffers.size();
4176
4177        orchestrateResult(&result);
4178
4179 // For reprocessing, result metadata is the same as settings so do not free it here to
4180 // avoid double free.
4181 if (result.result != iter->settings) {
4182 free_camera_metadata((camera_metadata_t *)result.result);
4183 }
4184 iter->resultMetadata = nullptr;
4185 iter = erasePendingRequest(iter);
4186 }
4187
4188 if (liveRequest) {
4189 for (auto &iter : mPendingRequestsList) {
4190 // Increment pipeline depth for the following pending requests.
4191 if (iter.frame_number > frameNumber) {
4192 iter.pipeline_depth++;
4193 }
4194 }
4195 }
4196
4197 unblockRequestIfNecessary();
4198}
4199
4200/*===========================================================================
4201 * FUNCTION : unblockRequestIfNecessary
4202 *
4203 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4204 * that mMutex is held when this function is called.
4205 *
4206 * PARAMETERS :
4207 *
4208 * RETURN :
4209 *
4210 *==========================================================================*/
4211void QCamera3HardwareInterface::unblockRequestIfNecessary()
4212{
4213 // Unblock process_capture_request
4214 pthread_cond_signal(&mRequestCond);
4215}
4216
4217/*===========================================================================
4218 * FUNCTION : isHdrSnapshotRequest
4219 *
4220 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4221 *
4222 * PARAMETERS : camera3 request structure
4223 *
4224 * RETURN : boolean decision variable
4225 *
4226 *==========================================================================*/
4227bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4228{
4229 if (request == NULL) {
4230 LOGE("Invalid request handle");
4231 assert(0);
4232 return false;
4233 }
4234
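    // Unless HDR snapshots are forced through mForceHdrSnapshot, the request must
    // use scene mode HDR (ANDROID_CONTROL_MODE_USE_SCENE_MODE together with
    // ANDROID_CONTROL_SCENE_MODE_HDR). In either case at least one BLOB (JPEG)
    // output buffer must be present for the request to count as an HDR snapshot.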
4235 if (!mForceHdrSnapshot) {
4236 CameraMetadata frame_settings;
4237 frame_settings = request->settings;
4238
4239 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4240 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4241 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4242 return false;
4243 }
4244 } else {
4245 return false;
4246 }
4247
4248 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4249 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4250 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4251 return false;
4252 }
4253 } else {
4254 return false;
4255 }
4256 }
4257
4258 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4259 if (request->output_buffers[i].stream->format
4260 == HAL_PIXEL_FORMAT_BLOB) {
4261 return true;
4262 }
4263 }
4264
4265 return false;
4266}
4267/*===========================================================================
4268 * FUNCTION : orchestrateRequest
4269 *
4270 * DESCRIPTION: Orchestrates a capture request from camera service
4271 *
4272 * PARAMETERS :
4273 * @request : request from framework to process
4274 *
4275 * RETURN : Error status codes
4276 *
4277 *==========================================================================*/
4278int32_t QCamera3HardwareInterface::orchestrateRequest(
4279 camera3_capture_request_t *request)
4280{
4281
4282 uint32_t originalFrameNumber = request->frame_number;
4283 uint32_t originalOutputCount = request->num_output_buffers;
4284 const camera_metadata_t *original_settings = request->settings;
4285 List<InternalRequest> internallyRequestedStreams;
4286 List<InternalRequest> emptyInternalList;
4287
4288 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4289 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4290 uint32_t internalFrameNumber;
4291 CameraMetadata modified_meta;
4292
4293
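        /* Bracketed HDR snapshot sequence, issued through internal frame numbers:
         * 1. AE is locked and exposure compensation set to GB_HDR_HALF_STEP_EV;
         *    a metering-only internal request lets the exposure settle, then the
         *    framework's own output buffers are captured under an internal frame
         *    number mapped back to the original framework frame number.
         * 2. The same settle-then-capture pattern is repeated at compensation 0
         *    and at GB_HDR_2X_STEP_EV on the internally requested BLOB stream
         *    only (meteringOnly/need_metadata toggled for the real captures).
         * 3. The original settings pointer is restored before returning. */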
4294 /* Add Blob channel to list of internally requested streams */
4295 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4296 if (request->output_buffers[i].stream->format
4297 == HAL_PIXEL_FORMAT_BLOB) {
4298 InternalRequest streamRequested;
4299 streamRequested.meteringOnly = 1;
4300 streamRequested.need_metadata = 0;
4301 streamRequested.stream = request->output_buffers[i].stream;
4302 internallyRequestedStreams.push_back(streamRequested);
4303 }
4304 }
4305 request->num_output_buffers = 0;
4306 auto itr = internallyRequestedStreams.begin();
4307
4308 /* Modify setting to set compensation */
4309 modified_meta = request->settings;
4310 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4311 uint8_t aeLock = 1;
4312 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4313 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4314 camera_metadata_t *modified_settings = modified_meta.release();
4315 request->settings = modified_settings;
4316
4317 /* Capture Settling & -2x frame */
4318 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4319 request->frame_number = internalFrameNumber;
4320 processCaptureRequest(request, internallyRequestedStreams);
4321
4322 request->num_output_buffers = originalOutputCount;
4323 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4324 request->frame_number = internalFrameNumber;
4325 processCaptureRequest(request, emptyInternalList);
4326 request->num_output_buffers = 0;
4327
4328 modified_meta = modified_settings;
4329 expCompensation = 0;
4330 aeLock = 1;
4331 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4332 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4333 modified_settings = modified_meta.release();
4334 request->settings = modified_settings;
4335
4336 /* Capture Settling & 0X frame */
4337
4338 itr = internallyRequestedStreams.begin();
4339 if (itr == internallyRequestedStreams.end()) {
4340 LOGE("Error Internally Requested Stream list is empty");
4341 assert(0);
4342 } else {
4343 itr->need_metadata = 0;
4344 itr->meteringOnly = 1;
4345 }
4346
4347 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4348 request->frame_number = internalFrameNumber;
4349 processCaptureRequest(request, internallyRequestedStreams);
4350
4351 itr = internallyRequestedStreams.begin();
4352 if (itr == internallyRequestedStreams.end()) {
4353 ALOGE("Error Internally Requested Stream list is empty");
4354 assert(0);
4355 } else {
4356 itr->need_metadata = 1;
4357 itr->meteringOnly = 0;
4358 }
4359
4360 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4361 request->frame_number = internalFrameNumber;
4362 processCaptureRequest(request, internallyRequestedStreams);
4363
4364 /* Capture 2X frame*/
4365 modified_meta = modified_settings;
4366 expCompensation = GB_HDR_2X_STEP_EV;
4367 aeLock = 1;
4368 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4369 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4370 modified_settings = modified_meta.release();
4371 request->settings = modified_settings;
4372
4373 itr = internallyRequestedStreams.begin();
4374 if (itr == internallyRequestedStreams.end()) {
4375 ALOGE("Error Internally Requested Stream list is empty");
4376 assert(0);
4377 } else {
4378 itr->need_metadata = 0;
4379 itr->meteringOnly = 1;
4380 }
4381 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4382 request->frame_number = internalFrameNumber;
4383 processCaptureRequest(request, internallyRequestedStreams);
4384
4385 itr = internallyRequestedStreams.begin();
4386 if (itr == internallyRequestedStreams.end()) {
4387 ALOGE("Error Internally Requested Stream list is empty");
4388 assert(0);
4389 } else {
4390 itr->need_metadata = 1;
4391 itr->meteringOnly = 0;
4392 }
4393
4394 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4395 request->frame_number = internalFrameNumber;
4396 processCaptureRequest(request, internallyRequestedStreams);
4397
4398
4399 /* Capture 2X on original streaming config*/
4400 internallyRequestedStreams.clear();
4401
4402 /* Restore original settings pointer */
4403 request->settings = original_settings;
4404 } else {
4405 uint32_t internalFrameNumber;
4406 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4407 request->frame_number = internalFrameNumber;
4408 return processCaptureRequest(request, internallyRequestedStreams);
4409 }
4410
4411 return NO_ERROR;
4412}
4413
4414/*===========================================================================
4415 * FUNCTION : orchestrateResult
4416 *
4417 * DESCRIPTION: Orchestrates a capture result to camera service
4418 *
4419 * PARAMETERS :
4420 * @request : request from framework to process
4421 *
4422 * RETURN :
4423 *
4424 *==========================================================================*/
4425void QCamera3HardwareInterface::orchestrateResult(
4426 camera3_capture_result_t *result)
4427{
4428 uint32_t frameworkFrameNumber;
4429 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4430 frameworkFrameNumber);
4431 if (rc != NO_ERROR) {
4432 LOGE("Cannot find translated frameworkFrameNumber");
4433 assert(0);
4434 } else {
4435 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4436            LOGD("Internal Request drop the result");
4437        } else {
4438 result->frame_number = frameworkFrameNumber;
4439 mCallbackOps->process_capture_result(mCallbackOps, result);
4440 }
4441 }
4442}
4443
4444/*===========================================================================
4445 * FUNCTION : orchestrateNotify
4446 *
4447 * DESCRIPTION: Orchestrates a notify to camera service
4448 *
4449 * PARAMETERS :
4450 * @notify_msg : notify message to be sent to the framework
4451 *
4452 * RETURN :
4453 *
4454 *==========================================================================*/
4455void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4456{
4457 uint32_t frameworkFrameNumber;
4458 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
4459    int32_t rc = NO_ERROR;
4460
4461    rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
4462            frameworkFrameNumber);
4463
4464    if (rc != NO_ERROR) {
4465        if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4466            LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4467            frameworkFrameNumber = 0;
4468        } else {
4469            LOGE("Cannot find translated frameworkFrameNumber");
4470            assert(0);
4471            return;
4472        }
4473    }
4474
4475    if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4476        LOGD("Internal Request drop the notifyCb");
4477    } else {
4478        notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4479        mCallbackOps->notify(mCallbackOps, notify_msg);
4480    }
4481}
4482
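/*
 * FrameNumberRegistry maps HAL-internal frame numbers to the framework frame
 * numbers that originated them, so internally generated requests (such as the
 * HDR bracketing captures above) can share the capture pipeline with framework
 * requests. Results and notifications are translated back through this map
 * before reaching the framework; entries created for purely internal requests
 * map to EMPTY_FRAMEWORK_FRAME_NUMBER and their callbacks are dropped.
 * Illustrative (hypothetical) mapping: internal 1000 -> framework 57,
 * internal 1001 -> EMPTY_FRAMEWORK_FRAME_NUMBER.
 */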
4483/*===========================================================================
4484 * FUNCTION : FrameNumberRegistry
4485 *
4486 * DESCRIPTION: Constructor
4487 *
4488 * PARAMETERS :
4489 *
4490 * RETURN :
4491 *
4492 *==========================================================================*/
4493FrameNumberRegistry::FrameNumberRegistry()
4494{
4495 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4496}
4497
4498/*===========================================================================
4499 * FUNCTION : ~FrameNumberRegistry
4500 *
4501 * DESCRIPTION: Destructor
4502 *
4503 * PARAMETERS :
4504 *
4505 * RETURN :
4506 *
4507 *==========================================================================*/
4508FrameNumberRegistry::~FrameNumberRegistry()
4509{
4510}
4511
4512/*===========================================================================
4513 * FUNCTION : PurgeOldEntriesLocked
4514 *
4515 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4516 *
4517 * PARAMETERS :
4518 *
4519 * RETURN : NONE
4520 *
4521 *==========================================================================*/
4522void FrameNumberRegistry::purgeOldEntriesLocked()
4523{
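    // Walk from the oldest entry and drop anything older than
    // (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE), so that only a
    // fixed-size window of the most recent mappings is retained.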
4524 while (_register.begin() != _register.end()) {
4525 auto itr = _register.begin();
4526 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4527 _register.erase(itr);
4528 } else {
4529 return;
4530 }
4531 }
4532}
4533
4534/*===========================================================================
4535 * FUNCTION : allocStoreInternalFrameNumber
4536 *
4537 * DESCRIPTION: Method to record a framework request and associate a newly
4538 *              generated internal frame number with it
4539 *
4540 * PARAMETERS :
4541 * @frameworkFrameNumber: Identifier given by the framework
4542 * @internalFrameNumber : Output parameter that receives the newly generated
4543 *                        internal frame number
4544 *
4545 * RETURN : Error code
4546 *
4547 *==========================================================================*/
4548int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4549 uint32_t &internalFrameNumber)
4550{
4551 Mutex::Autolock lock(mRegistryLock);
4552 internalFrameNumber = _nextFreeInternalNumber++;
4553 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4554 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4555 purgeOldEntriesLocked();
4556 return NO_ERROR;
4557}
4558
4559/*===========================================================================
4560 * FUNCTION : generateStoreInternalFrameNumber
4561 *
4562 * DESCRIPTION: Method to generate and store a new internal frame number that
4563 *              is not associated with any framework request
4564 *
4565 * PARAMETERS :
4566 * @internalFrameNumber: Output parameter that receives the newly generated
4567 *                       internal frame number
4568 *
4569 * RETURN : Error code
4570 *
4571 *==========================================================================*/
4572int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4573{
4574 Mutex::Autolock lock(mRegistryLock);
4575 internalFrameNumber = _nextFreeInternalNumber++;
4576 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4577 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4578 purgeOldEntriesLocked();
4579 return NO_ERROR;
4580}
4581
4582/*===========================================================================
4583 * FUNCTION : getFrameworkFrameNumber
4584 *
4585 * DESCRIPTION: Method to query the framework framenumber given an internal #
4586 *
4587 * PARAMETERS :
4588 * @internalFrame#: Internal reference
4589 * @frameworkframenumber: Output parameter holding framework frame entry
4590 *
4591 * RETURN : Error code
4592 *
4593 *==========================================================================*/
4594int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4595 uint32_t &frameworkFrameNumber)
4596{
4597 Mutex::Autolock lock(mRegistryLock);
4598 auto itr = _register.find(internalFrameNumber);
4599 if (itr == _register.end()) {
4600        LOGE("Cannot find internal#: %d", internalFrameNumber);
4601        return -ENOENT;
4602 }
4603
4604 frameworkFrameNumber = itr->second;
4605 purgeOldEntriesLocked();
4606 return NO_ERROR;
4607}
4608
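/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 *              at the given index of a channel: stream id, format, dimensions,
 *              per-plane stride/scanline, and padding computed as the frame
 *              length minus the sum of the plane sizes.
 *
 * PARAMETERS : @config : pbcamera stream configuration to fill
 *              @pbStreamId : stream id to assign in the configuration
 *              @pbStreamFormat : pbcamera pixel format of the stream
 *              @channel : channel that owns the stream
 *              @streamIndex : index of the stream within the channel
 *
 * RETURN : OK on success; BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/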
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004609status_t QCamera3HardwareInterface::fillPbStreamConfig(
4610 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4611 QCamera3Channel *channel, uint32_t streamIndex) {
4612 if (config == nullptr) {
4613 LOGE("%s: config is null", __FUNCTION__);
4614 return BAD_VALUE;
4615 }
4616
4617 if (channel == nullptr) {
4618 LOGE("%s: channel is null", __FUNCTION__);
4619 return BAD_VALUE;
4620 }
4621
4622 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4623 if (stream == nullptr) {
4624 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4625 return NAME_NOT_FOUND;
4626 }
4627
4628 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4629 if (streamInfo == nullptr) {
4630 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4631 return NAME_NOT_FOUND;
4632 }
4633
4634 config->id = pbStreamId;
4635 config->image.width = streamInfo->dim.width;
4636 config->image.height = streamInfo->dim.height;
4637 config->image.padding = 0;
4638 config->image.format = pbStreamFormat;
4639
4640    uint32_t totalPlaneSize = 0;
4641
4642    // Fill plane information.
4643 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4644 pbcamera::PlaneConfiguration plane;
4645 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4646 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4647 config->image.planes.push_back(plane);
4648
4649 totalPlaneSize += (plane.stride * plane.scanline);
4650    }
4651
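    // Padding covers any per-frame allocation beyond the visible planes. As a
    // hypothetical example, a Y plane of 1920x1088 (2088960 bytes) plus a UV
    // plane of 1920x544 (1044480 bytes) with a frame_len of 3137536 bytes
    // leaves a padding of 4096 bytes.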
4652    config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
4653    return OK;
4654}
4655
4656/*===========================================================================
4657 * FUNCTION : processCaptureRequest
4658 *
4659 * DESCRIPTION: process a capture request from camera service
4660 *
4661 * PARAMETERS :
4662 * @request : request from framework to process
4663 *
4664 * RETURN :
4665 *
4666 *==========================================================================*/
4667int QCamera3HardwareInterface::processCaptureRequest(
4668        camera3_capture_request_t *request,
4669 List<InternalRequest> &internallyRequestedStreams)
4670{
4671    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
4672    int rc = NO_ERROR;
4673 int32_t request_id;
4674 CameraMetadata meta;
4675    bool isVidBufRequested = false;
4676 camera3_stream_buffer_t *pInputBuffer = NULL;
4677    char prop[PROPERTY_VALUE_MAX];
4678
4679 pthread_mutex_lock(&mMutex);
4680
4681 // Validate current state
4682 switch (mState) {
4683 case CONFIGURED:
4684 case STARTED:
4685 /* valid state */
4686 break;
4687
4688 case ERROR:
4689 pthread_mutex_unlock(&mMutex);
4690 handleCameraDeviceError();
4691 return -ENODEV;
4692
4693 default:
4694 LOGE("Invalid state %d", mState);
4695 pthread_mutex_unlock(&mMutex);
4696 return -ENODEV;
4697 }
4698
4699    rc = validateCaptureRequest(request, internallyRequestedStreams);
4700    if (rc != NO_ERROR) {
4701 LOGE("incoming request is not valid");
4702 pthread_mutex_unlock(&mMutex);
4703 return rc;
4704 }
4705
4706 meta = request->settings;
4707
4708 // For first capture request, send capture intent, and
4709 // stream on all streams
4710 if (mState == CONFIGURED) {
4711 // send an unconfigure to the backend so that the isp
4712 // resources are deallocated
4713 if (!mFirstConfiguration) {
4714 cam_stream_size_info_t stream_config_info;
4715 int32_t hal_version = CAM_HAL_V3;
4716 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4717 stream_config_info.buffer_info.min_buffers =
4718 MIN_INFLIGHT_REQUESTS;
4719 stream_config_info.buffer_info.max_buffers =
4720                    m_bIs4KVideo ? 0 :
4721                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
4722            clear_metadata_buffer(mParameters);
4723 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4724 CAM_INTF_PARM_HAL_VERSION, hal_version);
4725 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4726 CAM_INTF_META_STREAM_INFO, stream_config_info);
4727 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4728 mParameters);
4729 if (rc < 0) {
4730 LOGE("set_parms for unconfigure failed");
4731 pthread_mutex_unlock(&mMutex);
4732 return rc;
4733 }
4734 }
4735        mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
4736        /* get eis information for stream configuration */
4737        cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
4738        char is_type_value[PROPERTY_VALUE_MAX];
4739        property_get("persist.camera.is_type", is_type_value, "4");
4740 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4741 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4742 property_get("persist.camera.is_type_preview", is_type_value, "4");
4743 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4744 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004745
4746 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4747 int32_t hal_version = CAM_HAL_V3;
4748 uint8_t captureIntent =
4749 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4750 mCaptureIntent = captureIntent;
4751 clear_metadata_buffer(mParameters);
4752 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4753 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4754 }
4755        if (mFirstConfiguration) {
4756 // configure instant AEC
4757 // Instant AEC is a session based parameter and it is needed only
4758 // once per complete session after open camera.
4759 // i.e. This is set only once for the first capture request, after open camera.
4760 setInstantAEC(meta);
4761 }
4762        uint8_t fwkVideoStabMode=0;
4763 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4764 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4765 }
4766
4767 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4768 // turn it on for video/preview
4769 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4770 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004771 int32_t vsMode;
4772 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4774 rc = BAD_VALUE;
4775 }
4776        LOGD("setEis %d", setEis);
4777 bool eis3Supported = false;
4778 size_t count = IS_TYPE_MAX;
4779 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4780 for (size_t i = 0; i < count; i++) {
4781 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4782 eis3Supported = true;
4783 break;
4784 }
4785 }
4786
4787        //IS type will be 0 unless EIS is supported. If EIS is supported
4788        //it could either be 4 or 5 depending on the stream and video size
4789        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4790            if (setEis) {
4791                if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4792                    is_type = isTypePreview;
4793                } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4794                    if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4795                        LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
4796                        is_type = IS_TYPE_EIS_2_0;
4797                    } else {
4798                        is_type = isTypeVideo;
4799                    }
4800                } else {
4801                    is_type = IS_TYPE_NONE;
4802                }
4803                mStreamConfigInfo.is_type[i] = is_type;
4804            } else {
4805                mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4806 }
4807 }
4808
4809 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4810 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4811
4812        //Disable tintless only if the property is set to 0
4813        memset(prop, 0, sizeof(prop));
4814        property_get("persist.camera.tintless.enable", prop, "1");
4815        int32_t tintless_value = atoi(prop);
4816
4817        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4818                CAM_INTF_PARM_TINTLESS, tintless_value);
4819
4820        //Disable CDS for HFR mode or if DIS/EIS is on.
4821 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4822 //after every configure_stream
4823 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4824 (m_bIsVideo)) {
4825 int32_t cds = CAM_CDS_MODE_OFF;
4826 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4827 CAM_INTF_PARM_CDS_MODE, cds))
4828 LOGE("Failed to disable CDS for HFR mode");
4829
4830 }
4831
4832        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4833            uint8_t* use_av_timer = NULL;
4834
4835            if (m_debug_avtimer){
4836                LOGI(" Enabling AV timer through setprop");
4837                use_av_timer = &m_debug_avtimer;
4838            }
4839            else{
4840                use_av_timer =
4841                        meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
4842                if (use_av_timer) {
4843                    LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4844                }
4845            }
4846
4847 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4848 rc = BAD_VALUE;
4849 }
4850 }
4851
4852        setMobicat();
4853
4854 /* Set fps and hfr mode while sending meta stream info so that sensor
4855 * can configure appropriate streaming mode */
4856 mHFRVideoFps = DEFAULT_VIDEO_FPS;
4857        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4858        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
4859        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4860 rc = setHalFpsRange(meta, mParameters);
4861            if (rc == NO_ERROR) {
4862 int32_t max_fps =
4863 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
4864                if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
4865                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4866 }
4867 /* For HFR, more buffers are dequeued upfront to improve the performance */
4868 if (mBatchSize) {
4869 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4870 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4871 }
4872 }
4873 else {
4874                LOGE("setHalFpsRange failed");
4875 }
4876 }
4877 if (meta.exists(ANDROID_CONTROL_MODE)) {
4878 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4879 rc = extractSceneMode(meta, metaMode, mParameters);
4880 if (rc != NO_ERROR) {
4881 LOGE("extractSceneMode failed");
4882 }
4883 }
4884        memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
4885
4886        if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4887 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4888 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4889 rc = setVideoHdrMode(mParameters, vhdr);
4890 if (rc != NO_ERROR) {
4891 LOGE("setVideoHDR is failed");
4892 }
4893 }
4894
4895        //TODO: validate the arguments, HSV scenemode should have only the
4896 //advertised fps ranges
4897
4898 /*set the capture intent, hal version, tintless, stream info,
4899         *and DIS enable parameters to the backend*/
4900 LOGD("set_parms META_STREAM_INFO " );
4901 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4902            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4903                    ", Format:%d is_type: %d",
4904                    mStreamConfigInfo.type[i],
4905                    mStreamConfigInfo.stream_sizes[i].width,
4906                    mStreamConfigInfo.stream_sizes[i].height,
4907                    mStreamConfigInfo.postprocess_mask[i],
4908                    mStreamConfigInfo.format[i],
4909                    mStreamConfigInfo.is_type[i]);
4910        }
4911
4912        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4913 mParameters);
4914 if (rc < 0) {
4915 LOGE("set_parms failed for hal version, stream info");
4916 }
4917
4918        memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4919        rc = getSensorModeInfo(mSensorModeInfo);
4920        if (rc != NO_ERROR) {
4921 LOGE("Failed to get sensor output size");
4922 pthread_mutex_unlock(&mMutex);
4923 goto error_exit;
4924 }
4925
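        // Map crop regions between the full active pixel array advertised to the
        // framework and the (possibly binned or cropped) active array of the
        // sensor mode selected for this configuration.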
4926 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4927 gCamCapability[mCameraId]->active_array_size.height,
4928                mSensorModeInfo.active_array_size.width,
4929 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004930
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004931 {
4932 Mutex::Autolock l(gHdrPlusClientLock);
4933 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004934 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004935 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4936 if (rc != OK) {
4937 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4938 mCameraId, mSensorModeInfo.op_pixel_clk);
4939 pthread_mutex_unlock(&mMutex);
4940 goto error_exit;
4941 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004942 }
4943 }
4944
4945        /* Set batchmode before initializing channel. Since registerBuffer
4946 * internally initializes some of the channels, better set batchmode
4947 * even before first register buffer */
4948 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4949 it != mStreamInfo.end(); it++) {
4950 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4951 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4952 && mBatchSize) {
4953 rc = channel->setBatchSize(mBatchSize);
4954 //Disable per frame map unmap for HFR/batchmode case
4955 rc |= channel->setPerFrameMapUnmap(false);
4956 if (NO_ERROR != rc) {
4957 LOGE("Channel init failed %d", rc);
4958 pthread_mutex_unlock(&mMutex);
4959 goto error_exit;
4960 }
4961 }
4962 }
4963
4964 //First initialize all streams
4965 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4966 it != mStreamInfo.end(); it++) {
4967 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4968 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4969 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
4970                        setEis) {
4971                for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4972                    if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4973                        is_type = mStreamConfigInfo.is_type[i];
4974                        break;
4975                    }
4976                }
4977                rc = channel->initialize(is_type);
4978            } else {
4979                rc = channel->initialize(IS_TYPE_NONE);
4980 }
4981 if (NO_ERROR != rc) {
4982 LOGE("Channel initialization failed %d", rc);
4983 pthread_mutex_unlock(&mMutex);
4984 goto error_exit;
4985 }
4986 }
4987
4988 if (mRawDumpChannel) {
4989 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4990 if (rc != NO_ERROR) {
4991 LOGE("Error: Raw Dump Channel init failed");
4992 pthread_mutex_unlock(&mMutex);
4993 goto error_exit;
4994 }
4995 }
4996        if (mHdrPlusRawSrcChannel) {
4997 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4998 if (rc != NO_ERROR) {
4999 LOGE("Error: HDR+ RAW Source Channel init failed");
5000 pthread_mutex_unlock(&mMutex);
5001 goto error_exit;
5002 }
5003 }
5004        if (mSupportChannel) {
5005 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5006 if (rc < 0) {
5007 LOGE("Support channel initialization failed");
5008 pthread_mutex_unlock(&mMutex);
5009 goto error_exit;
5010 }
5011 }
5012 if (mAnalysisChannel) {
5013 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5014 if (rc < 0) {
5015 LOGE("Analysis channel initialization failed");
5016 pthread_mutex_unlock(&mMutex);
5017 goto error_exit;
5018 }
5019 }
5020 if (mDummyBatchChannel) {
5021 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5022 if (rc < 0) {
5023 LOGE("mDummyBatchChannel setBatchSize failed");
5024 pthread_mutex_unlock(&mMutex);
5025 goto error_exit;
5026 }
5027            rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
5028            if (rc < 0) {
5029 LOGE("mDummyBatchChannel initialization failed");
5030 pthread_mutex_unlock(&mMutex);
5031 goto error_exit;
5032 }
5033 }
5034
5035 // Set bundle info
5036 rc = setBundleInfo();
5037 if (rc < 0) {
5038 LOGE("setBundleInfo failed %d", rc);
5039 pthread_mutex_unlock(&mMutex);
5040 goto error_exit;
5041 }
5042
5043 //update settings from app here
5044 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5045 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5046 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5047 }
5048 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5049 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5050 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5051 }
5052 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5053 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5054 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5055
5056 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5057 (mLinkedCameraId != mCameraId) ) {
5058 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5059 mLinkedCameraId, mCameraId);
5060                pthread_mutex_unlock(&mMutex);
5061                goto error_exit;
5062 }
5063 }
5064
5065 // add bundle related cameras
5066 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5067 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5068            cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5069                    &m_pDualCamCmdPtr->bundle_info;
5070            m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
5071            if (mIsDeviceLinked)
5072 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5073 else
5074 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5075
5076 pthread_mutex_lock(&gCamLock);
5077
5078 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5079 LOGE("Dualcam: Invalid Session Id ");
5080 pthread_mutex_unlock(&gCamLock);
5081                pthread_mutex_unlock(&mMutex);
5082                goto error_exit;
5083 }
5084
5085 if (mIsMainCamera == 1) {
5086 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5087 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
5088                m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5089                m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
5090                // related session id should be session id of linked session
5091 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5092 } else {
5093 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5094 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
5095                m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
5096                m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
5097                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5098            }
5099            m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
5100            pthread_mutex_unlock(&gCamLock);
5101
5102            rc = mCameraHandle->ops->set_dual_cam_cmd(
5103                    mCameraHandle->camera_handle);
5104            if (rc < 0) {
5105                LOGE("Dualcam: link failed");
5106                pthread_mutex_unlock(&mMutex);
5107                goto error_exit;
5108 }
5109 }
5110
5111 //Then start them.
5112 LOGH("Start META Channel");
5113 rc = mMetadataChannel->start();
5114 if (rc < 0) {
5115 LOGE("META channel start failed");
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119
5120 if (mAnalysisChannel) {
5121 rc = mAnalysisChannel->start();
5122 if (rc < 0) {
5123 LOGE("Analysis channel start failed");
5124 mMetadataChannel->stop();
5125 pthread_mutex_unlock(&mMutex);
5126 goto error_exit;
5127 }
5128 }
5129
5130 if (mSupportChannel) {
5131 rc = mSupportChannel->start();
5132 if (rc < 0) {
5133 LOGE("Support channel start failed");
5134 mMetadataChannel->stop();
5135 /* Although support and analysis are mutually exclusive today
5136                adding it in any case for future-proofing */
5137 if (mAnalysisChannel) {
5138 mAnalysisChannel->stop();
5139 }
5140 pthread_mutex_unlock(&mMutex);
5141 goto error_exit;
5142 }
5143 }
5144 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5145 it != mStreamInfo.end(); it++) {
5146 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5147 LOGH("Start Processing Channel mask=%d",
5148 channel->getStreamTypeMask());
5149 rc = channel->start();
5150 if (rc < 0) {
5151 LOGE("channel start failed");
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156
5157 if (mRawDumpChannel) {
5158 LOGD("Starting raw dump stream");
5159 rc = mRawDumpChannel->start();
5160 if (rc != NO_ERROR) {
5161 LOGE("Error Starting Raw Dump Channel");
5162 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5163 it != mStreamInfo.end(); it++) {
5164 QCamera3Channel *channel =
5165 (QCamera3Channel *)(*it)->stream->priv;
5166 LOGH("Stopping Processing Channel mask=%d",
5167 channel->getStreamTypeMask());
5168 channel->stop();
5169 }
5170 if (mSupportChannel)
5171 mSupportChannel->stop();
5172 if (mAnalysisChannel) {
5173 mAnalysisChannel->stop();
5174 }
5175 mMetadataChannel->stop();
5176 pthread_mutex_unlock(&mMutex);
5177 goto error_exit;
5178 }
5179 }
5180
5181 if (mChannelHandle) {
5182
5183 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5184 mChannelHandle);
5185 if (rc != NO_ERROR) {
5186 LOGE("start_channel failed %d", rc);
5187 pthread_mutex_unlock(&mMutex);
5188 goto error_exit;
5189 }
5190 }
5191
5192 goto no_error;
5193error_exit:
5194        mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
5195        return rc;
5196no_error:
5197        mWokenUpByDaemon = false;
5198        mPendingLiveRequest = 0;
5199        mFirstConfiguration = false;
5200    }
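    // End of the one-time setup that runs on the first capture request after
    // stream configuration (mState == CONFIGURED).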
5201
5202    // Enable HDR+ mode for the first PREVIEW_INTENT request.
5203    {
5204        Mutex::Autolock l(gHdrPlusClientLock);
5205        if (gEaselManagerClient.isEaselPresentOnDevice() &&
5206                !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5207                meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5208                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5209                        ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5210            rc = enableHdrPlusModeLocked();
5211            if (rc != OK) {
5212                LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5213                pthread_mutex_unlock(&mMutex);
5214                return rc;
5215            }
5216
5217            mFirstPreviewIntentSeen = true;
5218        }
5219    }
5220
5221    uint32_t frameNumber = request->frame_number;
5222    cam_stream_ID_t streamsArray;
5223
5224 if (mFlushPerf) {
5225 //we cannot accept any requests during flush
5226 LOGE("process_capture_request cannot proceed during flush");
5227 pthread_mutex_unlock(&mMutex);
5228 return NO_ERROR; //should return an error
5229 }
5230
5231 if (meta.exists(ANDROID_REQUEST_ID)) {
5232 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5233 mCurrentRequestId = request_id;
5234 LOGD("Received request with id: %d", request_id);
5235 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5236 LOGE("Unable to find request id field, \
5237 & no previous id available");
5238 pthread_mutex_unlock(&mMutex);
5239 return NAME_NOT_FOUND;
5240 } else {
5241 LOGD("Re-using old request id");
5242 request_id = mCurrentRequestId;
5243 }
5244
5245 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5246 request->num_output_buffers,
5247 request->input_buffer,
5248 frameNumber);
5249 // Acquire all request buffers first
5250    streamsArray.num_streams = 0;
5251    int blob_request = 0;
5252    bool depthRequestPresent = false;
5253    uint32_t snapshotStreamId = 0;
5254    for (size_t i = 0; i < request->num_output_buffers; i++) {
5255        const camera3_stream_buffer_t& output = request->output_buffers[i];
5256        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5257
5258        if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5259                (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
5260            //FIXME??:Call function to store local copy of jpeg data for encode params.
5261            blob_request = 1;
5262 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5263 }
5264
5265 if (output.acquire_fence != -1) {
5266 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5267 close(output.acquire_fence);
5268 if (rc != OK) {
5269 LOGE("sync wait failed %d", rc);
5270 pthread_mutex_unlock(&mMutex);
5271 return rc;
5272 }
5273 }
5274
5275        if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5276                (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
5277            depthRequestPresent = true;
5278            continue;
5279        }
5280
5281        streamsArray.stream_request[streamsArray.num_streams++].streamID =
5282                channel->getStreamID(channel->getStreamTypeMask());
5283
5284 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5285 isVidBufRequested = true;
5286 }
5287 }
5288
5289    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicates
5290    for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5291 itr++) {
5292 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5293 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5294 channel->getStreamID(channel->getStreamTypeMask());
5295
5296 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5297 isVidBufRequested = true;
5298 }
5299 }
5300
5301    if (blob_request) {
5302        KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
5303        mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
5304    }
5305    if (blob_request && mRawDumpChannel) {
5306        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5307        streamsArray.stream_request[streamsArray.num_streams].streamID =
5308                mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5309        streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5310    }
5311
5312    {
5313 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5314 // Request a RAW buffer if
5315 // 1. mHdrPlusRawSrcChannel is valid.
5316 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5317 // 3. There is no pending HDR+ request.
5318 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5319 mHdrPlusPendingRequests.size() == 0) {
5320 streamsArray.stream_request[streamsArray.num_streams].streamID =
5321 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5322 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5323 }
5324    }
5325
5326    //extract capture intent
5327 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5328 mCaptureIntent =
5329 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5330 }
5331
5332 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5333 mCacMode =
5334 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5335 }
5336
5337 bool hdrPlusRequest = false;
5338    HdrPlusPendingRequest pendingHdrPlusRequest = {};
5339
5340    {
5341        Mutex::Autolock l(gHdrPlusClientLock);
5342        // If this request has a still capture intent, try to submit an HDR+ request.
5343        if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5344                mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5345            hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5346        }
5347    }
5348
5349    if (hdrPlusRequest) {
5350 // For a HDR+ request, just set the frame parameters.
5351 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5352 if (rc < 0) {
5353 LOGE("fail to set frame parameters");
5354 pthread_mutex_unlock(&mMutex);
5355 return rc;
5356 }
5357 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005358 /* Parse the settings:
5359 * - For every request in NORMAL MODE
5360 * - For every request in HFR mode during preview only case
5361 * - For first request of every batch in HFR mode during video
5362 * recording. In batch mode the same settings, except the frame
5363 * number, are repeated in each request of the batch.
5364 */
5365 if (!mBatchSize ||
5366 (mBatchSize && !isVidBufRequested) ||
5367 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005368 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005369 if (rc < 0) {
5370 LOGE("fail to set frame parameters");
5371 pthread_mutex_unlock(&mMutex);
5372 return rc;
5373 }
5374 }
5375 /* For batch mode HFR, setFrameParameters is not called for every
5376 * request; only the frame number of the latest request is parsed.
5377 * Keep track of the first and last frame numbers in a batch so that
5378 * metadata for the frame numbers of the batch can be duplicated in
5379 * handleBatchMetadata */
5380 if (mBatchSize) {
5381 if (!mToBeQueuedVidBufs) {
5382 //start of the batch
5383 mFirstFrameNumberInBatch = request->frame_number;
5384 }
5385 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5386 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5387 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005388 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 return BAD_VALUE;
5390 }
5391 }
5392 if (mNeedSensorRestart) {
5393 /* Unlock the mutex as restartSensor waits on the channels to be
5394 * stopped, which in turn calls stream callback functions -
5395 * handleBufferWithLock and handleMetadataWithLock */
5396 pthread_mutex_unlock(&mMutex);
5397 rc = dynamicUpdateMetaStreamInfo();
5398 if (rc != NO_ERROR) {
5399 LOGE("Restarting the sensor failed");
5400 return BAD_VALUE;
5401 }
5402 mNeedSensorRestart = false;
5403 pthread_mutex_lock(&mMutex);
5404 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005405 if(mResetInstantAEC) {
5406 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5407 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5408 mResetInstantAEC = false;
5409 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005410 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005411 if (request->input_buffer->acquire_fence != -1) {
5412 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5413 close(request->input_buffer->acquire_fence);
5414 if (rc != OK) {
5415 LOGE("input buffer sync wait failed %d", rc);
5416 pthread_mutex_unlock(&mMutex);
5417 return rc;
5418 }
5419 }
5420 }
5421
5422 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5423 mLastCustIntentFrmNum = frameNumber;
5424 }
5425 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005426 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005427 pendingRequestIterator latestRequest;
5428 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005429 pendingRequest.num_buffers = depthRequestPresent ?
5430 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005431 pendingRequest.request_id = request_id;
5432 pendingRequest.blob_request = blob_request;
5433 pendingRequest.timestamp = 0;
5434 pendingRequest.bUrgentReceived = 0;
5435 if (request->input_buffer) {
5436 pendingRequest.input_buffer =
5437 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5438 *(pendingRequest.input_buffer) = *(request->input_buffer);
5439 pInputBuffer = pendingRequest.input_buffer;
5440 } else {
5441 pendingRequest.input_buffer = NULL;
5442 pInputBuffer = NULL;
5443 }
5444
5445 pendingRequest.pipeline_depth = 0;
5446 pendingRequest.partial_result_cnt = 0;
5447 extractJpegMetadata(mCurJpegMeta, request);
5448 pendingRequest.jpegMetadata = mCurJpegMeta;
5449 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5450 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005451 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005452 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5453 mHybridAeEnable =
5454 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5455 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005456
5457 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5458 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005459 /* DevCamDebug metadata processCaptureRequest */
5460 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5461 mDevCamDebugMetaEnable =
5462 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5463 }
5464 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5465 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005466
5467 //extract CAC info
5468 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5469 mCacMode =
5470 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5471 }
5472 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005473 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005474
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005475 // extract enableZsl info
5476 if (gExposeEnableZslKey) {
5477 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5478 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5479 mZslEnabled = pendingRequest.enableZsl;
5480 } else {
5481 pendingRequest.enableZsl = mZslEnabled;
5482 }
5483 }
5484
Thierry Strudel3d639192016-09-09 11:52:26 -07005485 PendingBuffersInRequest bufsForCurRequest;
5486 bufsForCurRequest.frame_number = frameNumber;
5487 // Mark current timestamp for the new request
5488 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005489 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005490
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005491 if (hdrPlusRequest) {
5492 // Save settings for this request.
5493 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5494 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5495
5496 // Add to pending HDR+ request queue.
5497 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5498 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5499
5500 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5501 }
5502
Thierry Strudel3d639192016-09-09 11:52:26 -07005503 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005504 if ((request->output_buffers[i].stream->data_space ==
5505 HAL_DATASPACE_DEPTH) &&
5506 (HAL_PIXEL_FORMAT_BLOB ==
5507 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005508 continue;
5509 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005510 RequestedBufferInfo requestedBuf;
5511 memset(&requestedBuf, 0, sizeof(requestedBuf));
5512 requestedBuf.stream = request->output_buffers[i].stream;
5513 requestedBuf.buffer = NULL;
5514 pendingRequest.buffers.push_back(requestedBuf);
5515
5516 // Add the buffer handle to the pending buffers list
5517 PendingBufferInfo bufferInfo;
5518 bufferInfo.buffer = request->output_buffers[i].buffer;
5519 bufferInfo.stream = request->output_buffers[i].stream;
5520 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5521 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5522 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5523 frameNumber, bufferInfo.buffer,
5524 channel->getStreamTypeMask(), bufferInfo.stream->format);
5525 }
5526 // Add this request packet into mPendingBuffersMap
5527 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5528 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5529 mPendingBuffersMap.get_num_overall_buffers());
5530
5531 latestRequest = mPendingRequestsList.insert(
5532 mPendingRequestsList.end(), pendingRequest);
5533 if(mFlush) {
5534 LOGI("mFlush is true");
5535 pthread_mutex_unlock(&mMutex);
5536 return NO_ERROR;
5537 }
5538
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005539 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5540 // channel.
5541 if (!hdrPlusRequest) {
5542 int indexUsed;
5543 // Notify metadata channel we receive a request
5544 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005545
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005546 if(request->input_buffer != NULL){
5547 LOGD("Input request, frame_number %d", frameNumber);
5548 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5549 if (NO_ERROR != rc) {
5550 LOGE("fail to set reproc parameters");
5551 pthread_mutex_unlock(&mMutex);
5552 return rc;
5553 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005554 }
5555
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005556 // Call request on other streams
5557 uint32_t streams_need_metadata = 0;
5558 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5559 for (size_t i = 0; i < request->num_output_buffers; i++) {
5560 const camera3_stream_buffer_t& output = request->output_buffers[i];
5561 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5562
5563 if (channel == NULL) {
5564 LOGW("invalid channel pointer for stream");
5565 continue;
5566 }
5567
5568 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5569 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5570 output.buffer, request->input_buffer, frameNumber);
5571 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005572 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005573 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5574 if (rc < 0) {
5575 LOGE("Fail to request on picture channel");
5576 pthread_mutex_unlock(&mMutex);
5577 return rc;
5578 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005579 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005580 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5581 assert(NULL != mDepthChannel);
5582 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005583
Emilian Peev7650c122017-01-19 08:24:33 -08005584 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5585 if (rc < 0) {
5586 LOGE("Fail to map on depth buffer");
5587 pthread_mutex_unlock(&mMutex);
5588 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005589 }
Emilian Peev7650c122017-01-19 08:24:33 -08005590 } else {
5591 LOGD("snapshot request with buffer %p, frame_number %d",
5592 output.buffer, frameNumber);
5593 if (!request->settings) {
5594 rc = channel->request(output.buffer, frameNumber,
5595 NULL, mPrevParameters, indexUsed);
5596 } else {
5597 rc = channel->request(output.buffer, frameNumber,
5598 NULL, mParameters, indexUsed);
5599 }
5600 if (rc < 0) {
5601 LOGE("Fail to request on picture channel");
5602 pthread_mutex_unlock(&mMutex);
5603 return rc;
5604 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605
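                    // Record which buffer index this channel consumed in streamsArray;
                    // in constrained high-speed mode the backend free-runs instead
                    // (CAM_FREERUN_IDX).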
Emilian Peev7650c122017-01-19 08:24:33 -08005606 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5607 uint32_t j = 0;
5608 for (j = 0; j < streamsArray.num_streams; j++) {
5609 if (streamsArray.stream_request[j].streamID == streamId) {
5610 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5611 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5612 else
5613 streamsArray.stream_request[j].buf_index = indexUsed;
5614 break;
5615 }
5616 }
5617 if (j == streamsArray.num_streams) {
5618 LOGE("Did not find matching stream to update index");
5619 assert(0);
5620 }
5621
5622 pendingBufferIter->need_metadata = true;
5623 streams_need_metadata++;
5624 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005625 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005626 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5627 bool needMetadata = false;
5628 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5629 rc = yuvChannel->request(output.buffer, frameNumber,
5630 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5631 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005632 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005633 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 pthread_mutex_unlock(&mMutex);
5635 return rc;
5636 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005637
5638 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5639 uint32_t j = 0;
5640 for (j = 0; j < streamsArray.num_streams; j++) {
5641 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5643 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5644 else
5645 streamsArray.stream_request[j].buf_index = indexUsed;
5646 break;
5647 }
5648 }
5649 if (j == streamsArray.num_streams) {
5650 LOGE("Did not find matching stream to update index");
5651 assert(0);
5652 }
5653
5654 pendingBufferIter->need_metadata = needMetadata;
5655 if (needMetadata)
5656 streams_need_metadata += 1;
5657 LOGD("calling YUV channel request, need_metadata is %d",
5658 needMetadata);
5659 } else {
5660 LOGD("request with buffer %p, frame_number %d",
5661 output.buffer, frameNumber);
5662
5663 rc = channel->request(output.buffer, frameNumber, indexUsed);
5664
5665 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5666 uint32_t j = 0;
5667 for (j = 0; j < streamsArray.num_streams; j++) {
5668 if (streamsArray.stream_request[j].streamID == streamId) {
5669 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5670 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5671 else
5672 streamsArray.stream_request[j].buf_index = indexUsed;
5673 break;
5674 }
5675 }
5676 if (j == streamsArray.num_streams) {
5677 LOGE("Did not find matching stream to update index");
5678 assert(0);
5679 }
5680
5681 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5682 && mBatchSize) {
5683 mToBeQueuedVidBufs++;
5684 if (mToBeQueuedVidBufs == mBatchSize) {
5685 channel->queueBatchBuf();
5686 }
5687 }
5688 if (rc < 0) {
5689 LOGE("request failed");
5690 pthread_mutex_unlock(&mMutex);
5691 return rc;
5692 }
5693 }
5694 pendingBufferIter++;
5695 }
5696
5697 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5698 itr++) {
5699 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5700
5701 if (channel == NULL) {
5702 LOGE("invalid channel pointer for stream");
5703 assert(0);
5704 return BAD_VALUE;
5705 }
5706
5707 InternalRequest requestedStream;
5708 requestedStream = (*itr);
5709
5710
5711 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5712 LOGD("snapshot request internally input buffer %p, frame_number %d",
5713 request->input_buffer, frameNumber);
5714 if(request->input_buffer != NULL){
5715 rc = channel->request(NULL, frameNumber,
5716 pInputBuffer, &mReprocMeta, indexUsed, true,
5717 requestedStream.meteringOnly);
5718 if (rc < 0) {
5719 LOGE("Fail to request on picture channel");
5720 pthread_mutex_unlock(&mMutex);
5721 return rc;
5722 }
5723 } else {
5724 LOGD("snapshot request with frame_number %d", frameNumber);
5725 if (!request->settings) {
5726 rc = channel->request(NULL, frameNumber,
5727 NULL, mPrevParameters, indexUsed, true,
5728 requestedStream.meteringOnly);
5729 } else {
5730 rc = channel->request(NULL, frameNumber,
5731 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5732 }
5733 if (rc < 0) {
5734 LOGE("Fail to request on picture channel");
5735 pthread_mutex_unlock(&mMutex);
5736 return rc;
5737 }
5738
5739 if ((*itr).meteringOnly != 1) {
5740 requestedStream.need_metadata = 1;
5741 streams_need_metadata++;
5742 }
5743 }
5744
5745 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5746 uint32_t j = 0;
5747 for (j = 0; j < streamsArray.num_streams; j++) {
5748 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005749 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5750 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5751 else
5752 streamsArray.stream_request[j].buf_index = indexUsed;
5753 break;
5754 }
5755 }
5756 if (j == streamsArray.num_streams) {
5757 LOGE("Did not find matching stream to update index");
5758 assert(0);
5759 }
5760
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005761 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005762 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005763 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005764 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005765 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005766 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005767 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005768
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005769 //If 2 streams have need_metadata set to true, fail the request, unless
5770 //we copy/reference count the metadata buffer
5771 if (streams_need_metadata > 1) {
5772 LOGE("not supporting request in which two streams requires"
5773 " 2 HAL metadata for reprocessing");
5774 pthread_mutex_unlock(&mMutex);
5775 return -EINVAL;
5776 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005777
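    // Enable PDAF data in the backend only when this request includes a depth
    // (PDAF) output buffer.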
Emilian Peev7650c122017-01-19 08:24:33 -08005778 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5780 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5781 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5782 pthread_mutex_unlock(&mMutex);
5783 return BAD_VALUE;
5784 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005785 if (request->input_buffer == NULL) {
5786 /* Set the parameters to backend:
5787 * - For every request in NORMAL MODE
5788 * - For every request in HFR mode during preview only case
5789 * - Once every batch in HFR mode during video recording
5790 */
5791 if (!mBatchSize ||
5792 (mBatchSize && !isVidBufRequested) ||
5793 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5794 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5795 mBatchSize, isVidBufRequested,
5796 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005797
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005798 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5799 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5800 uint32_t m = 0;
5801 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5802 if (streamsArray.stream_request[k].streamID ==
5803 mBatchedStreamsArray.stream_request[m].streamID)
5804 break;
5805 }
5806 if (m == mBatchedStreamsArray.num_streams) {
5807 mBatchedStreamsArray.stream_request\
5808 [mBatchedStreamsArray.num_streams].streamID =
5809 streamsArray.stream_request[k].streamID;
5810 mBatchedStreamsArray.stream_request\
5811 [mBatchedStreamsArray.num_streams].buf_index =
5812 streamsArray.stream_request[k].buf_index;
5813 mBatchedStreamsArray.num_streams =
5814 mBatchedStreamsArray.num_streams + 1;
5815 }
5816 }
5817 streamsArray = mBatchedStreamsArray;
5818 }
5819 /* Update stream id of all the requested buffers */
5820 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5821 streamsArray)) {
5822 LOGE("Failed to set stream type mask in the parameters");
5823 return BAD_VALUE;
5824 }
5825
5826 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5827 mParameters);
5828 if (rc < 0) {
5829 LOGE("set_parms failed");
5830 }
5831 /* reset to zero because the batch is queued */
5832 mToBeQueuedVidBufs = 0;
5833 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5834 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5835 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
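            // Batch not yet full: accumulate this request's stream IDs into
            // mBatchedStreamsArray; set_parms is deferred until the batch is queued.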
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005836 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5837 uint32_t m = 0;
5838 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5839 if (streamsArray.stream_request[k].streamID ==
5840 mBatchedStreamsArray.stream_request[m].streamID)
5841 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005842 }
5843 if (m == mBatchedStreamsArray.num_streams) {
5844 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5845 streamID = streamsArray.stream_request[k].streamID;
5846 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5847 buf_index = streamsArray.stream_request[k].buf_index;
5848 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5849 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005850 }
5851 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005852 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005853 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005854 }
5855
5856 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5857
5858 mState = STARTED;
5859 // Set up a timed condition wait
5860 struct timespec ts;
5861 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005862 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005863 if (rc < 0) {
5864 isValidTimeout = 0;
5865 LOGE("Error reading the real time clock!!");
5866 }
5867 else {
5868 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005869 int64_t timeout = 5;
5870 {
5871 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5872 // If there is a pending HDR+ request, the following requests may be blocked until the
5873 // HDR+ request is done. So allow a longer timeout.
5874 if (mHdrPlusPendingRequests.size() > 0) {
5875 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5876 }
5877 }
5878 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005879 }
5880 //Block on conditional variable
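    // Throttle process_capture_request: wait until the number of in-flight
    // requests drops below mMinInFlightRequests, unless this is a reprocess
    // (input buffer) request or the HAL is in ERROR/DEINIT state.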
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005881 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005882 (mState != ERROR) && (mState != DEINIT)) {
5883 if (!isValidTimeout) {
5884 LOGD("Blocking on conditional wait");
5885 pthread_cond_wait(&mRequestCond, &mMutex);
5886 }
5887 else {
5888 LOGD("Blocking on timed conditional wait");
5889 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5890 if (rc == ETIMEDOUT) {
5891 rc = -ENODEV;
5892 LOGE("Unblocked on timeout!!!!");
5893 break;
5894 }
5895 }
5896 LOGD("Unblocked");
5897 if (mWokenUpByDaemon) {
5898 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005899 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005900 break;
5901 }
5902 }
5903 pthread_mutex_unlock(&mMutex);
5904
5905 return rc;
5906}
5907
5908/*===========================================================================
5909 * FUNCTION : dump
5910 *
5911 * DESCRIPTION: Dumps HAL state (pending requests, pending buffers and
 *              pending frame drops) to the given file descriptor
5912 *
5913 * PARAMETERS :
5914 *   @fd : file descriptor to write the dump to
5915 *
5916 * RETURN : None
5917 *==========================================================================*/
5918void QCamera3HardwareInterface::dump(int fd)
5919{
5920 pthread_mutex_lock(&mMutex);
5921 dprintf(fd, "\n Camera HAL3 information Begin \n");
5922
5923 dprintf(fd, "\nNumber of pending requests: %zu \n",
5924 mPendingRequestsList.size());
5925 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5926 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5927 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5928 for(pendingRequestIterator i = mPendingRequestsList.begin();
5929 i != mPendingRequestsList.end(); i++) {
5930 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5931 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5932 i->input_buffer);
5933 }
5934 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5935 mPendingBuffersMap.get_num_overall_buffers());
5936 dprintf(fd, "-------+------------------\n");
5937 dprintf(fd, " Frame | Stream type mask \n");
5938 dprintf(fd, "-------+------------------\n");
5939 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5940 for(auto &j : req.mPendingBufferList) {
5941 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5942 dprintf(fd, " %5d | %11d \n",
5943 req.frame_number, channel->getStreamTypeMask());
5944 }
5945 }
5946 dprintf(fd, "-------+------------------\n");
5947
5948 dprintf(fd, "\nPending frame drop list: %zu\n",
5949 mPendingFrameDropList.size());
5950 dprintf(fd, "-------+-----------\n");
5951 dprintf(fd, " Frame | Stream ID \n");
5952 dprintf(fd, "-------+-----------\n");
5953 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5954 i != mPendingFrameDropList.end(); i++) {
5955 dprintf(fd, " %5d | %9d \n",
5956 i->frame_number, i->stream_ID);
5957 }
5958 dprintf(fd, "-------+-----------\n");
5959
5960 dprintf(fd, "\n Camera HAL3 information End \n");
5961
5962 /* use dumpsys media.camera as trigger to send update debug level event */
5963 mUpdateDebugLevel = true;
5964 pthread_mutex_unlock(&mMutex);
5965 return;
5966}
5967
5968/*===========================================================================
5969 * FUNCTION : flush
5970 *
5971 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5972 * conditionally restarts channels
5973 *
5974 * PARAMETERS :
5975 * @ restartChannels: re-start all channels
5976 *
5977 *
5978 * RETURN :
5979 * 0 on success
5980 * Error code on failure
5981 *==========================================================================*/
5982int QCamera3HardwareInterface::flush(bool restartChannels)
5983{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005984 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005985 int32_t rc = NO_ERROR;
5986
5987 LOGD("Unblocking Process Capture Request");
5988 pthread_mutex_lock(&mMutex);
5989 mFlush = true;
5990 pthread_mutex_unlock(&mMutex);
5991
5992 rc = stopAllChannels();
5993 // unlink of dualcam
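    // Turn off related-sensors sync before tearing down so the linked camera
    // session is not left bundled with this one.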
5994 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005995 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5996 &m_pDualCamCmdPtr->bundle_info;
5997 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005998 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5999 pthread_mutex_lock(&gCamLock);
6000
6001 if (mIsMainCamera == 1) {
6002 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6003 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006004 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006005 // related session id should be session id of linked session
6006 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6007 } else {
6008 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6009 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006010 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006011 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6012 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006013 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006014 pthread_mutex_unlock(&gCamLock);
6015
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006016 rc = mCameraHandle->ops->set_dual_cam_cmd(
6017 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006018 if (rc < 0) {
6019 LOGE("Dualcam: Unlink failed, but still proceed to close");
6020 }
6021 }
6022
6023 if (rc < 0) {
6024 LOGE("stopAllChannels failed");
6025 return rc;
6026 }
6027 if (mChannelHandle) {
6028 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6029 mChannelHandle);
6030 }
6031
6032 // Reset bundle info
6033 rc = setBundleInfo();
6034 if (rc < 0) {
6035 LOGE("setBundleInfo failed %d", rc);
6036 return rc;
6037 }
6038
6039 // Mutex Lock
6040 pthread_mutex_lock(&mMutex);
6041
6042 // Unblock process_capture_request
6043 mPendingLiveRequest = 0;
6044 pthread_cond_signal(&mRequestCond);
6045
6046 rc = notifyErrorForPendingRequests();
6047 if (rc < 0) {
6048 LOGE("notifyErrorForPendingRequests failed");
6049 pthread_mutex_unlock(&mMutex);
6050 return rc;
6051 }
6052
6053 mFlush = false;
6054
6055 // Start the Streams/Channels
6056 if (restartChannels) {
6057 rc = startAllChannels();
6058 if (rc < 0) {
6059 LOGE("startAllChannels failed");
6060 pthread_mutex_unlock(&mMutex);
6061 return rc;
6062 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006063 if (mChannelHandle) {
6064 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6065 mChannelHandle);
6066 if (rc < 0) {
6067 LOGE("start_channel failed");
6068 pthread_mutex_unlock(&mMutex);
6069 return rc;
6070 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006071 }
6072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006073 pthread_mutex_unlock(&mMutex);
6074
6075 return 0;
6076}
6077
6078/*===========================================================================
6079 * FUNCTION : flushPerf
6080 *
6081 * DESCRIPTION: This is the performance-optimized version of flush that does
6082 * not stream off the channels; instead it flushes the backend
6083 *
6084 * PARAMETERS :
6085 *
6086 *
6087 * RETURN : 0 : success
6088 * -EINVAL: input is malformed (device is not valid)
6089 * -ENODEV: if the device has encountered a serious error
6090 *==========================================================================*/
6091int QCamera3HardwareInterface::flushPerf()
6092{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006093 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006094 int32_t rc = 0;
6095 struct timespec timeout;
6096 bool timed_wait = false;
6097
6098 pthread_mutex_lock(&mMutex);
6099 mFlushPerf = true;
6100 mPendingBuffersMap.numPendingBufsAtFlush =
6101 mPendingBuffersMap.get_num_overall_buffers();
6102 LOGD("Calling flush. Wait for %d buffers to return",
6103 mPendingBuffersMap.numPendingBufsAtFlush);
6104
6105 /* send the flush event to the backend */
6106 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6107 if (rc < 0) {
6108 LOGE("Error in flush: IOCTL failure");
6109 mFlushPerf = false;
6110 pthread_mutex_unlock(&mMutex);
6111 return -ENODEV;
6112 }
6113
6114 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6115 LOGD("No pending buffers in HAL, return flush");
6116 mFlushPerf = false;
6117 pthread_mutex_unlock(&mMutex);
6118 return rc;
6119 }
6120
6121 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006122 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006123 if (rc < 0) {
6124 LOGE("Error reading the real time clock, cannot use timed wait");
6125 } else {
6126 timeout.tv_sec += FLUSH_TIMEOUT;
6127 timed_wait = true;
6128 }
6129
6130 //Block on conditional variable
6131 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6132 LOGD("Waiting on mBuffersCond");
6133 if (!timed_wait) {
6134 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6135 if (rc != 0) {
6136 LOGE("pthread_cond_wait failed due to rc = %s",
6137 strerror(rc));
6138 break;
6139 }
6140 } else {
6141 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6142 if (rc != 0) {
6143 LOGE("pthread_cond_timedwait failed due to rc = %s",
6144 strerror(rc));
6145 break;
6146 }
6147 }
6148 }
6149 if (rc != 0) {
6150 mFlushPerf = false;
6151 pthread_mutex_unlock(&mMutex);
6152 return -ENODEV;
6153 }
6154
6155 LOGD("Received buffers, now safe to return them");
6156
6157 //make sure the channels handle flush
6158 //currently only required for the picture channel to release snapshot resources
6159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6160 it != mStreamInfo.end(); it++) {
6161 QCamera3Channel *channel = (*it)->channel;
6162 if (channel) {
6163 rc = channel->flush();
6164 if (rc) {
6165 LOGE("Flushing the channels failed with error %d", rc);
6166 // even though the channel flush failed we need to continue and
6167 // return the buffers we have to the framework, however the return
6168 // value will be an error
6169 rc = -ENODEV;
6170 }
6171 }
6172 }
6173
6174 /* notify the frameworks and send errored results */
6175 rc = notifyErrorForPendingRequests();
6176 if (rc < 0) {
6177 LOGE("notifyErrorForPendingRequests failed");
6178 pthread_mutex_unlock(&mMutex);
6179 return rc;
6180 }
6181
6182 //unblock process_capture_request
6183 mPendingLiveRequest = 0;
6184 unblockRequestIfNecessary();
6185
6186 mFlushPerf = false;
6187 pthread_mutex_unlock(&mMutex);
6188 LOGD ("Flush Operation complete. rc = %d", rc);
6189 return rc;
6190}
6191
6192/*===========================================================================
6193 * FUNCTION : handleCameraDeviceError
6194 *
6195 * DESCRIPTION: This function performs an internal flush, notifies the
6196 * framework of the error and updates the state variable.
6197 *
6198 * PARAMETERS : None
6199 *
6200 * RETURN : NO_ERROR on Success
6201 * Error code on failure
6202 *==========================================================================*/
6203int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6204{
6205 int32_t rc = NO_ERROR;
6206
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006207 {
6208 Mutex::Autolock lock(mFlushLock);
6209 pthread_mutex_lock(&mMutex);
6210 if (mState != ERROR) {
6211 //if mState != ERROR, nothing to be done
6212 pthread_mutex_unlock(&mMutex);
6213 return NO_ERROR;
6214 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006215 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006216
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006217 rc = flush(false /* restart channels */);
6218 if (NO_ERROR != rc) {
6219 LOGE("internal flush to handle mState = ERROR failed");
6220 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006221
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006222 pthread_mutex_lock(&mMutex);
6223 mState = DEINIT;
6224 pthread_mutex_unlock(&mMutex);
6225 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006226
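    // Notify the framework of a fatal device error (frame number 0, no stream)
    // so it can tear down the camera device.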
6227 camera3_notify_msg_t notify_msg;
6228 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6229 notify_msg.type = CAMERA3_MSG_ERROR;
6230 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6231 notify_msg.message.error.error_stream = NULL;
6232 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006233 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006234
6235 return rc;
6236}
6237
6238/*===========================================================================
6239 * FUNCTION : captureResultCb
6240 *
6241 * DESCRIPTION: Callback handler for all capture result
6242 * (streams, as well as metadata)
6243 *
6244 * PARAMETERS :
6245 * @metadata_buf : metadata information
6246 * @buffer : actual gralloc buffer to be returned to frameworks.
6247 * NULL if metadata.
 * @frame_number : frame number of the request
 * @isInputBuffer : true if this callback is for an input (reprocess) buffer
6248 *
6249 * RETURN : NONE
6250 *==========================================================================*/
6251void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6252 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6253{
6254 if (metadata_buf) {
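        // Snapshot mBatchSize under the mutex; batched HFR metadata is split per
        // frame in handleBatchMetadata, otherwise it is handled directly with the
        // lock held.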
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006255 pthread_mutex_lock(&mMutex);
6256 uint8_t batchSize = mBatchSize;
6257 pthread_mutex_unlock(&mMutex);
6258 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006259 handleBatchMetadata(metadata_buf,
6260 true /* free_and_bufdone_meta_buf */);
6261 } else { /* mBatchSize = 0 */
6262 hdrPlusPerfLock(metadata_buf);
6263 pthread_mutex_lock(&mMutex);
6264 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006265 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006266 true /* last urgent frame of batch metadata */,
6267 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006268 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006269 pthread_mutex_unlock(&mMutex);
6270 }
6271 } else if (isInputBuffer) {
6272 pthread_mutex_lock(&mMutex);
6273 handleInputBufferWithLock(frame_number);
6274 pthread_mutex_unlock(&mMutex);
6275 } else {
6276 pthread_mutex_lock(&mMutex);
6277 handleBufferWithLock(buffer, frame_number);
6278 pthread_mutex_unlock(&mMutex);
6279 }
6280 return;
6281}
6282
6283/*===========================================================================
6284 * FUNCTION : getReprocessibleOutputStreamId
6285 *
6286 * DESCRIPTION: Get source output stream id for the input reprocess stream
6287 * based on size and format, which would be the largest
6288 * output stream if an input stream exists.
6289 *
6290 * PARAMETERS :
6291 * @id : return the stream id if found
6292 *
6293 * RETURN : int32_t type of status
6294 * NO_ERROR -- success
6295 * none-zero failure code
6296 *==========================================================================*/
6297int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6298{
6299 /* Check for any output or bidirectional stream with the same size and
6300 format as the input stream, and return that stream's id */
6301 if ((mInputStreamInfo.dim.width > 0) &&
6302 (mInputStreamInfo.dim.height > 0)) {
6303 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6304 it != mStreamInfo.end(); it++) {
6305
6306 camera3_stream_t *stream = (*it)->stream;
6307 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6308 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6309 (stream->format == mInputStreamInfo.format)) {
6310 // Usage flag for an input stream and the source output stream
6311 // may be different.
6312 LOGD("Found reprocessible output stream! %p", *it);
6313 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6314 mInputStreamInfo.usage, stream->usage);
6315
6316 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6317 if (channel != NULL && channel->mStreams[0]) {
6318 id = channel->mStreams[0]->getMyServerID();
6319 return NO_ERROR;
6320 }
6321 }
6322 }
6323 } else {
6324 LOGD("No input stream, so no reprocessible output stream");
6325 }
6326 return NAME_NOT_FOUND;
6327}
6328
6329/*===========================================================================
6330 * FUNCTION : lookupFwkName
6331 *
6332 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6333 * make sure the parameter is correctly propagated
6334 *
6335 * PARAMETERS :
6336 * @arr : map between the two enums
6337 * @len : len of the map
6338 * @hal_name : name of the hal_parm to map
6339 *
6340 * RETURN : int type of status
6341 * fwk_name -- success
6342 * non-zero failure code
6343 *==========================================================================*/
6344template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6345 size_t len, halType hal_name)
6346{
6347
6348 for (size_t i = 0; i < len; i++) {
6349 if (arr[i].hal_name == hal_name) {
6350 return arr[i].fwk_name;
6351 }
6352 }
6353
6354 /* Not being able to find a matching framework type is not necessarily
6355 * an error. This happens when mm-camera supports more attributes
6356 * than the framework does */
6357 LOGH("Cannot find matching framework type");
6358 return NAME_NOT_FOUND;
6359}
6360
6361/*===========================================================================
6362 * FUNCTION : lookupHalName
6363 *
6364 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6365 * make sure the parameter is correctly propagated
6366 *
6367 * PARAMETERS :
6368 * @arr : map between the two enums
6369 * @len : len of the map
6370 * @fwk_name : name of the framework parameter to map
6371 *
6372 * RETURN : int32_t type of status
6373 * hal_name -- success
6374 * non-zero failure code
6375 *==========================================================================*/
6376template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6377 size_t len, fwkType fwk_name)
6378{
6379 for (size_t i = 0; i < len; i++) {
6380 if (arr[i].fwk_name == fwk_name) {
6381 return arr[i].hal_name;
6382 }
6383 }
6384
6385 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6386 return NAME_NOT_FOUND;
6387}
6388
6389/*===========================================================================
6390 * FUNCTION : lookupProp
6391 *
6392 * DESCRIPTION: lookup a value by its name
6393 *
6394 * PARAMETERS :
6395 * @arr : map between the two enums
6396 * @len : size of the map
6397 * @name : name to be looked up
6398 *
6399 * RETURN : Value if found
6400 * CAM_CDS_MODE_MAX if not found
6401 *==========================================================================*/
6402template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6403 size_t len, const char *name)
6404{
6405 if (name) {
6406 for (size_t i = 0; i < len; i++) {
6407 if (!strcmp(arr[i].desc, name)) {
6408 return arr[i].val;
6409 }
6410 }
6411 }
6412 return CAM_CDS_MODE_MAX;
6413}
6414
6415/*===========================================================================
6416 * FUNCTION   : translateFromHalMetadata
 *
6417 * DESCRIPTION: Translates metadata reported by the backend into the
 *              camera_metadata_t format expected by the framework
6418 *
6419 * PARAMETERS :
6420 * @metadata : metadata information from callback
6421 * @timestamp: metadata buffer timestamp
6422 * @request_id: request id
6423 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006424 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006425 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6426 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006427 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006428 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6429 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006430 *
6431 * RETURN : camera_metadata_t*
6432 * metadata in a format specified by fwk
6433 *==========================================================================*/
6434camera_metadata_t*
6435QCamera3HardwareInterface::translateFromHalMetadata(
6436 metadata_buffer_t *metadata,
6437 nsecs_t timestamp,
6438 int32_t request_id,
6439 const CameraMetadata& jpegMetadata,
6440 uint8_t pipeline_depth,
6441 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006442 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006443 /* DevCamDebug metadata translateFromHalMetadata argument */
6444 uint8_t DevCamDebug_meta_enable,
6445 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006446 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006447 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006448 bool lastMetadataInBatch,
6449 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006450{
6451 CameraMetadata camMetadata;
6452 camera_metadata_t *resultMetadata;
6453
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006454 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006455 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6456 * Timestamp is needed because it's used for shutter notify calculation.
6457 * */
6458 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6459 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006460 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006461 }
6462
Thierry Strudel3d639192016-09-09 11:52:26 -07006463 if (jpegMetadata.entryCount())
6464 camMetadata.append(jpegMetadata);
6465
6466 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6467 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6468 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6469 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006470 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006471 if (mBatchSize == 0) {
6472 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6473 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6474 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006475
Samuel Ha68ba5172016-12-15 18:41:12 -08006476 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6477 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6478 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6479 // DevCamDebug metadata translateFromHalMetadata AF
6480 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6481 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6482 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6483 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6484 }
6485 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6486 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6487 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6488 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6489 }
6490 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6491 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6492 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6493 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6494 }
6495 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6496 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6497 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6498 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6499 }
6500 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6501 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6502 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6503 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6504 }
6505 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6506 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6507 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6508 *DevCamDebug_af_monitor_pdaf_target_pos;
6509 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6510 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6511 }
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6513 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6514 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6515 *DevCamDebug_af_monitor_pdaf_confidence;
6516 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6517 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6518 }
6519 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6520 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6521 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6522 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6523 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6524 }
6525 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6526 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6527 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6528 *DevCamDebug_af_monitor_tof_target_pos;
6529 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6530 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6531 }
6532 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6533 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6534 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6535 *DevCamDebug_af_monitor_tof_confidence;
6536 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6537 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6538 }
6539 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6540 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6541 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6542 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6543 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6544 }
6545 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6546 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6547 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6548 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6549 &fwk_DevCamDebug_af_monitor_type_select, 1);
6550 }
6551 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6552 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6553 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6554 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6555 &fwk_DevCamDebug_af_monitor_refocus, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6558 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6559 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6560 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6561 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6562 }
6563 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6564 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6565 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6566 *DevCamDebug_af_search_pdaf_target_pos;
6567 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6568 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6569 }
6570 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6571 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6572 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6573 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6574 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6575 }
6576 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6577 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6578 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6579 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6580 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6581 }
6582 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6583 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6584 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6585 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6586 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6587 }
6588 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6589 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6590 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6591 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6592 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6593 }
6594 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6595 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6596 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6597 *DevCamDebug_af_search_tof_target_pos;
6598 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6599 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6600 }
6601 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6602 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6603 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6604 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6605 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6606 }
6607 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6608 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6609 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6610 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6611 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6612 }
6613 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6614 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6615 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6616 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6617 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6618 }
6619 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6620 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6621 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6622 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6623 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6624 }
6625 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6626 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6627 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6628 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6629 &fwk_DevCamDebug_af_search_type_select, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6632 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6633 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6634 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6635 &fwk_DevCamDebug_af_search_next_pos, 1);
6636 }
6637 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6638 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6639 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6640 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6641 &fwk_DevCamDebug_af_search_target_pos, 1);
6642 }
6643 // DevCamDebug metadata translateFromHalMetadata AEC
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6645 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6646 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6647 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6650 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6651 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6652 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6653 }
6654 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6655 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6656 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6657 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6660 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6661 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6662 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6665 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6666 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6667 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6668 }
6669 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6670 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6671 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6672 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6673 }
6674 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6675 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6676 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6677 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6678 }
6679 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6680 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6681 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6682 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6683 }
Samuel Ha34229982017-02-17 13:51:11 -08006684 // DevCamDebug metadata translateFromHalMetadata zzHDR
6685 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6686 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6687 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6688 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6689 }
6690 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6691 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006692 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006693 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6694 }
6695 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6696 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6697 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6698 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6699 }
6700 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6701 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006702 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006703 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6704 }
6705 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6706 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6707 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6708 *DevCamDebug_aec_hdr_sensitivity_ratio;
6709 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6710 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6711 }
6712 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6713 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6714 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6715 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6716 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6717 }
6718 // DevCamDebug metadata translateFromHalMetadata ADRC
6719 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6720 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6721 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6722 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6723 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6724 }
6725 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6726 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6727 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6728 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6729 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6730 }
6731 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6732 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6733 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6734 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6735 }
6736 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6737 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6738 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6739 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6740 }
6741 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6742 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6743 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6744 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6745 }
6746 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6747 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6748 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6749 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6750 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006751 // DevCamDebug metadata translateFromHalMetadata AWB
6752 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6753 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6754 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6755 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6756 }
6757 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6758 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6759 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6760 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6761 }
6762 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6763 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6764 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6765 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6768 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6769 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6770 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6771 }
6772 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6773 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6774 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6775 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6776 }
6777 }
6778 // atrace_end(ATRACE_TAG_ALWAYS);
6779
Thierry Strudel3d639192016-09-09 11:52:26 -07006780 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6781 int64_t fwk_frame_number = *frame_number;
6782 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6783 }
6784
6785 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6786 int32_t fps_range[2];
6787 fps_range[0] = (int32_t)float_range->min_fps;
6788 fps_range[1] = (int32_t)float_range->max_fps;
6789 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6790 fps_range, 2);
6791 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6792 fps_range[0], fps_range[1]);
6793 }
6794
6795 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6796 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6797 }
6798
6799 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6800 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6801 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6802 *sceneMode);
6803 if (NAME_NOT_FOUND != val) {
6804 uint8_t fwkSceneMode = (uint8_t)val;
6805 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6806 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6807 fwkSceneMode);
6808 }
6809 }
6810
6811 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6812 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6813 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6814 }
6815
6816 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6817 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6818 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6819 }
6820
6821 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6822 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6823 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6824 }
6825
6826 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6827 CAM_INTF_META_EDGE_MODE, metadata) {
6828 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6829 }
6830
6831 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6832 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6833 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6834 }
6835
6836 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6837 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6838 }
6839
6840 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6841 if (0 <= *flashState) {
6842 uint8_t fwk_flashState = (uint8_t) *flashState;
6843 if (!gCamCapability[mCameraId]->flash_available) {
6844 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6845 }
6846 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6847 }
6848 }
6849
6850 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6851 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6852 if (NAME_NOT_FOUND != val) {
6853 uint8_t fwk_flashMode = (uint8_t)val;
6854 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6855 }
6856 }
6857
6858 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6859 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6860 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6861 }
6862
6863 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6864 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6865 }
6866
6867 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6868 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6869 }
6870
6871 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6872 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6873 }
6874
6875 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6876 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6877 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6878 }
6879
6880 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6881 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6882 LOGD("fwk_videoStab = %d", fwk_videoStab);
6883 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6884 } else {
6885        // Regardless of whether video stabilization is supported, CTS expects the EIS result
6886        // to be non-NULL, so hard-code the video stabilization result to OFF mode.
6887 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6888 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006889 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006890 }
6891
6892 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6893 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6894 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6895 }
6896
6897 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6898 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6899 }
6900
Thierry Strudel3d639192016-09-09 11:52:26 -07006901 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6902 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006903 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006904
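        // The helper call below appears to remap the four per-channel dynamic black levels
        // based on the sensor's CFA color arrangement, so that the fwk_blackLevelInd values
        // logged and reported below are in a fixed RGGB pattern order (inferred from this
        // call site, not a statement of the helper's full contract).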
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006905 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6906 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006907
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006908 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006909 blackLevelAppliedPattern->cam_black_level[0],
6910 blackLevelAppliedPattern->cam_black_level[1],
6911 blackLevelAppliedPattern->cam_black_level[2],
6912 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006913 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6914 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006915
6916#ifndef USE_HAL_3_3
6917 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6918        // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
6919        // depth space (divide by 2^4 = 16).
Jason Lee4f3d96e2017-02-28 19:24:14 +05306920 fwk_blackLevelInd[0] /= 16.0;
6921 fwk_blackLevelInd[1] /= 16.0;
6922 fwk_blackLevelInd[2] /= 16.0;
6923 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006924 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6925 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006926#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006927 }
6928
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006929#ifndef USE_HAL_3_3
6930 // Fixed whitelevel is used by ISP/Sensor
6931 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6932 &gCamCapability[mCameraId]->white_level, 1);
6933#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006934
6935 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6936 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6937 int32_t scalerCropRegion[4];
6938 scalerCropRegion[0] = hScalerCropRegion->left;
6939 scalerCropRegion[1] = hScalerCropRegion->top;
6940 scalerCropRegion[2] = hScalerCropRegion->width;
6941 scalerCropRegion[3] = hScalerCropRegion->height;
6942
6943 // Adjust crop region from sensor output coordinate system to active
6944 // array coordinate system.
6945 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6946 scalerCropRegion[2], scalerCropRegion[3]);
6947
6948 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6949 }
6950
6951 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6952 LOGD("sensorExpTime = %lld", *sensorExpTime);
6953 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6954 }
6955
6956    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
6957            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6958        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
6959        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6960 }
6961
6962 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6963 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6964 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6965 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6966 sensorRollingShutterSkew, 1);
6967 }
6968
6969 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6970 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6971 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6972
6973 //calculate the noise profile based on sensitivity
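        // ANDROID_SENSOR_NOISE_PROFILE is reported as one (S, O) pair per color channel,
        // where, roughly, S scales the signal-dependent (shot) noise and O is the
        // signal-independent (read) noise floor. The same pair, derived from the current
        // sensitivity by the computeNoiseModelEntryS/O helpers, is replicated for every
        // channel below.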
6974 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6975 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6976 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6977 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6978 noise_profile[i] = noise_profile_S;
6979 noise_profile[i+1] = noise_profile_O;
6980 }
6981 LOGD("noise model entry (S, O) is (%f, %f)",
6982 noise_profile_S, noise_profile_O);
6983 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6984 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6985 }
6986
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006987#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006988 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006989 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006990 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006991 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006992 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6993 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6994 }
6995 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006996#endif
6997
Thierry Strudel3d639192016-09-09 11:52:26 -07006998 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6999 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7000 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7001 }
7002
7003 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7004 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7005 *faceDetectMode);
7006 if (NAME_NOT_FOUND != val) {
7007 uint8_t fwk_faceDetectMode = (uint8_t)val;
7008 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7009
7010 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7011 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7012 CAM_INTF_META_FACE_DETECTION, metadata) {
7013 uint8_t numFaces = MIN(
7014 faceDetectionInfo->num_faces_detected, MAX_ROI);
7015 int32_t faceIds[MAX_ROI];
7016 uint8_t faceScores[MAX_ROI];
7017 int32_t faceRectangles[MAX_ROI * 4];
7018 int32_t faceLandmarks[MAX_ROI * 6];
7019 size_t j = 0, k = 0;
7020
7021 for (size_t i = 0; i < numFaces; i++) {
7022 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7023                    // Map the face boundary from the sensor output coordinate
7024                    // system to the active array coordinate system.
7025 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7026 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7027 rect.width, rect.height);
7028
7029 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7030 faceRectangles+j, -1);
7031
7032 j+= 4;
7033 }
7034 if (numFaces <= 0) {
7035 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7036 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7037 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7038 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7039 }
7040
7041 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7042 numFaces);
7043 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7044 faceRectangles, numFaces * 4U);
7045 if (fwk_faceDetectMode ==
7046 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7047 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7048 CAM_INTF_META_FACE_LANDMARK, metadata) {
7049
7050 for (size_t i = 0; i < numFaces; i++) {
7051                        // Map the landmark coordinates from the sensor output
7052                        // coordinate system to the active array coordinate system.
7053 mCropRegionMapper.toActiveArray(
7054 landmarks->face_landmarks[i].left_eye_center.x,
7055 landmarks->face_landmarks[i].left_eye_center.y);
7056 mCropRegionMapper.toActiveArray(
7057 landmarks->face_landmarks[i].right_eye_center.x,
7058 landmarks->face_landmarks[i].right_eye_center.y);
7059 mCropRegionMapper.toActiveArray(
7060 landmarks->face_landmarks[i].mouth_center.x,
7061 landmarks->face_landmarks[i].mouth_center.y);
7062
7063 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007064 k+= TOTAL_LANDMARK_INDICES;
7065 }
7066 } else {
7067 for (size_t i = 0; i < numFaces; i++) {
7068 setInvalidLandmarks(faceLandmarks+k);
7069 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007070 }
7071 }
7072
7073 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7074 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7075 faceLandmarks, numFaces * 6U);
7076 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007077 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7078 CAM_INTF_META_FACE_BLINK, metadata) {
7079 uint8_t detected[MAX_ROI];
7080 uint8_t degree[MAX_ROI * 2];
7081 for (size_t i = 0; i < numFaces; i++) {
7082 detected[i] = blinks->blink[i].blink_detected;
7083 degree[2 * i] = blinks->blink[i].left_blink;
7084 degree[2 * i + 1] = blinks->blink[i].right_blink;
7085 }
7086 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7087 detected, numFaces);
7088 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7089 degree, numFaces * 2);
7090 }
7091 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7092 CAM_INTF_META_FACE_SMILE, metadata) {
7093 uint8_t degree[MAX_ROI];
7094 uint8_t confidence[MAX_ROI];
7095 for (size_t i = 0; i < numFaces; i++) {
7096 degree[i] = smiles->smile[i].smile_degree;
7097 confidence[i] = smiles->smile[i].smile_confidence;
7098 }
7099 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7100 degree, numFaces);
7101 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7102 confidence, numFaces);
7103 }
7104 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7105 CAM_INTF_META_FACE_GAZE, metadata) {
7106 int8_t angle[MAX_ROI];
7107 int32_t direction[MAX_ROI * 3];
7108 int8_t degree[MAX_ROI * 2];
7109 for (size_t i = 0; i < numFaces; i++) {
7110 angle[i] = gazes->gaze[i].gaze_angle;
7111 direction[3 * i] = gazes->gaze[i].updown_dir;
7112 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7113 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7114 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7115 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7116 }
7117 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7118 (uint8_t *)angle, numFaces);
7119 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7120 direction, numFaces * 3);
7121 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7122 (uint8_t *)degree, numFaces * 2);
7123 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007124 }
7125 }
7126 }
7127 }
7128
7129 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7130 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007131 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007132 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007133 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007134
Shuzhen Wang14415f52016-11-16 18:26:18 -08007135 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7136 histogramBins = *histBins;
7137 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7138 }
7139
7140 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007141 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7142 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007143 int32_t* histogramData = NULL;
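                // Pick one channel's histogram buffer to publish: for Bayer stats this is
                // GR, GB, B, or (by default) R depending on the reported data type; for
                // YUV stats it is the luma histogram. The chosen buffer is reported
                // through NEXUS_EXPERIMENTAL_2017_HISTOGRAM with histogramBins entries.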
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007144
7145 switch (stats_data->type) {
7146 case CAM_HISTOGRAM_TYPE_BAYER:
7147 switch (stats_data->bayer_stats.data_type) {
7148 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007149 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7150 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007151 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007152 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7153 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007154 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007155 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7156 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007157 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007158 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007159 case CAM_STATS_CHANNEL_R:
7160 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007161 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7162 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007163 }
7164 break;
7165 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007166 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007167 break;
7168 }
7169
Shuzhen Wang14415f52016-11-16 18:26:18 -08007170 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007171 }
7172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007173 }
7174
7175 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7176 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7177 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7178 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7179 }
7180
7181 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7182 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7183 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7184 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7185 }
7186
7187 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7188 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7189 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7190 CAM_MAX_SHADING_MAP_HEIGHT);
7191 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7192 CAM_MAX_SHADING_MAP_WIDTH);
7193 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7194 lensShadingMap->lens_shading, 4U * map_width * map_height);
7195 }
7196
7197 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7198 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7199 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7200 }
7201
7202 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7203        // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curves.
7204        /* ch0 = G, ch1 = B, ch2 = R */
7205 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7206 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7207 tonemap->tonemap_points_cnt,
7208 CAM_MAX_TONEMAP_CURVE_SIZE);
7209 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7210 }
7211
7212 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7213 &tonemap->curves[0].tonemap_points[0][0],
7214 tonemap->tonemap_points_cnt * 2);
7215
7216 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7217 &tonemap->curves[1].tonemap_points[0][0],
7218 tonemap->tonemap_points_cnt * 2);
7219
7220 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7221 &tonemap->curves[2].tonemap_points[0][0],
7222 tonemap->tonemap_points_cnt * 2);
7223 }
7224
7225 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7226 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7227 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7228 CC_GAIN_MAX);
7229 }
7230
7231 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7232 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7233 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7234 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7235 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7236 }
7237
7238 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7239 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7240 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7241 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7242 toneCurve->tonemap_points_cnt,
7243 CAM_MAX_TONEMAP_CURVE_SIZE);
7244 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7245 }
7246 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7247 (float*)toneCurve->curve.tonemap_points,
7248 toneCurve->tonemap_points_cnt * 2);
7249 }
7250
7251 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7252 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7253 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7254 predColorCorrectionGains->gains, 4);
7255 }
7256
7257 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7258 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7259 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7260 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7261 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7262 }
7263
7264 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7265 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7266 }
7267
7268 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7269 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7270 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7271 }
7272
7273 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7274 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7275 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7276 }
7277
7278 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7279 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7280 *effectMode);
7281 if (NAME_NOT_FOUND != val) {
7282 uint8_t fwk_effectMode = (uint8_t)val;
7283 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7284 }
7285 }
7286
7287 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7288 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7289 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7290 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7291 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7292 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7293 }
7294 int32_t fwk_testPatternData[4];
7295 fwk_testPatternData[0] = testPatternData->r;
7296 fwk_testPatternData[3] = testPatternData->b;
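        // The framework expects ANDROID_SENSOR_TEST_PATTERN_DATA in a fixed channel order
        // (R first, B last, with the two green channels in between), so the HAL's gr/gb
        // values are swapped below depending on the sensor's CFA color arrangement.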
7297 switch (gCamCapability[mCameraId]->color_arrangement) {
7298 case CAM_FILTER_ARRANGEMENT_RGGB:
7299 case CAM_FILTER_ARRANGEMENT_GRBG:
7300 fwk_testPatternData[1] = testPatternData->gr;
7301 fwk_testPatternData[2] = testPatternData->gb;
7302 break;
7303 case CAM_FILTER_ARRANGEMENT_GBRG:
7304 case CAM_FILTER_ARRANGEMENT_BGGR:
7305 fwk_testPatternData[2] = testPatternData->gr;
7306 fwk_testPatternData[1] = testPatternData->gb;
7307 break;
7308 default:
7309 LOGE("color arrangement %d is not supported",
7310 gCamCapability[mCameraId]->color_arrangement);
7311 break;
7312 }
7313 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7314 }
7315
7316 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7317 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7318 }
7319
7320 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7321 String8 str((const char *)gps_methods);
7322 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7323 }
7324
7325 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7326 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7327 }
7328
7329 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7330 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7331 }
7332
7333 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7334 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7335 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7336 }
7337
7338 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7339 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7340 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7341 }
7342
7343 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7344 int32_t fwk_thumb_size[2];
7345 fwk_thumb_size[0] = thumb_size->width;
7346 fwk_thumb_size[1] = thumb_size->height;
7347 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7348 }
7349
7350 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7351 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7352 privateData,
7353 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7354 }
7355
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007356 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007357 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007358 meteringMode, 1);
7359 }
7360
Thierry Strudel54dc9782017-02-15 12:12:10 -08007361 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7362 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7363 LOGD("hdr_scene_data: %d %f\n",
7364 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7365 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7366 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7367 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7368 &isHdr, 1);
7369 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7370 &isHdrConfidence, 1);
7371 }
7372
7373
7374
Thierry Strudel3d639192016-09-09 11:52:26 -07007375 if (metadata->is_tuning_params_valid) {
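        // Serialize the tuning parameters into a flat blob: a header of six uint32 fields
        // (data version and the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the
        // variable-length sensor, VFE, CPP and CAC payloads, each clamped to its
        // TUNING_*_MAX limit. The blob length passed to update() is expressed in
        // uint32_t units.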
7376 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7377 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7378 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7379
7380
7381 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7382 sizeof(uint32_t));
7383 data += sizeof(uint32_t);
7384
7385 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7386 sizeof(uint32_t));
7387 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7388 data += sizeof(uint32_t);
7389
7390 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7391 sizeof(uint32_t));
7392 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7393 data += sizeof(uint32_t);
7394
7395 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7396 sizeof(uint32_t));
7397 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7398 data += sizeof(uint32_t);
7399
7400 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7401 sizeof(uint32_t));
7402 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7403 data += sizeof(uint32_t);
7404
7405 metadata->tuning_params.tuning_mod3_data_size = 0;
7406 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7407 sizeof(uint32_t));
7408 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7409 data += sizeof(uint32_t);
7410
7411 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7412 TUNING_SENSOR_DATA_MAX);
7413 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7414 count);
7415 data += count;
7416
7417 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7418 TUNING_VFE_DATA_MAX);
7419 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7420 count);
7421 data += count;
7422
7423 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7424 TUNING_CPP_DATA_MAX);
7425 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7426 count);
7427 data += count;
7428
7429 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7430 TUNING_CAC_DATA_MAX);
7431 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7432 count);
7433 data += count;
7434
7435 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7436 (int32_t *)(void *)tuning_meta_data_blob,
7437 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7438 }
7439
7440 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7441 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7442 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7443 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7444 NEUTRAL_COL_POINTS);
7445 }
7446
7447 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7448 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7449 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7450 }
7451
7452 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7453 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7454        // Map the AE region from the sensor output coordinate system to the
7455        // active array coordinate system.
7456 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7457 hAeRegions->rect.width, hAeRegions->rect.height);
7458
7459 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7460 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7461 REGIONS_TUPLE_COUNT);
7462 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7463 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7464 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7465 hAeRegions->rect.height);
7466 }
7467
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007468 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7469 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7470 if (NAME_NOT_FOUND != val) {
7471 uint8_t fwkAfMode = (uint8_t)val;
7472 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7473 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7474 } else {
7475 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7476 val);
7477 }
7478 }
7479
Thierry Strudel3d639192016-09-09 11:52:26 -07007480 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7481 uint8_t fwk_afState = (uint8_t) *afState;
7482 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007483 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007484 }
7485
7486 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7487 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7488 }
7489
7490 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7491 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7492 }
7493
7494 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7495 uint8_t fwk_lensState = *lensState;
7496 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7497 }
7498
Thierry Strudel3d639192016-09-09 11:52:26 -07007499
7500 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007501 uint32_t ab_mode = *hal_ab_mode;
7502 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7503 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7504 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7505 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007506 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007507 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007508 if (NAME_NOT_FOUND != val) {
7509 uint8_t fwk_ab_mode = (uint8_t)val;
7510 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7511 }
7512 }
7513
7514 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7515 int val = lookupFwkName(SCENE_MODES_MAP,
7516 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7517 if (NAME_NOT_FOUND != val) {
7518 uint8_t fwkBestshotMode = (uint8_t)val;
7519 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7520 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7521 } else {
7522 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7523 }
7524 }
7525
7526 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7527 uint8_t fwk_mode = (uint8_t) *mode;
7528 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7529 }
7530
7531    /* Constant metadata values to be updated */
7532 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7533 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7534
7535 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7536 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7537
7538 int32_t hotPixelMap[2];
7539 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7540
7541 // CDS
7542 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7543 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7544 }
7545
Thierry Strudel04e026f2016-10-10 11:27:36 -07007546 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7547 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007548 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
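        // mCurrFeatureState tracks whether staggered video HDR is currently considered
        // active, so that a PROFILE_META_HDR_TOGGLED message is logged only on an
        // on/off transition rather than on every frame.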
Thierry Strudel04e026f2016-10-10 11:27:36 -07007549 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7550 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7551 } else {
7552 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007554
7555 if(fwk_hdr != curr_hdr_state) {
7556 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7557 if(fwk_hdr)
7558 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7559 else
7560 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7561 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007562 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7563 }
7564
Thierry Strudel54dc9782017-02-15 12:12:10 -08007565 //binning correction
7566 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7567 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7568 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7569 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7570 }
7571
Thierry Strudel04e026f2016-10-10 11:27:36 -07007572 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007573 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007574 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7575 int8_t is_ir_on = 0;
7576
7577        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7578 if(is_ir_on != curr_ir_state) {
7579 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7580 if(is_ir_on)
7581 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7582 else
7583 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7584 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007585 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007586 }
7587
Thierry Strudel269c81a2016-10-12 12:13:59 -07007588 // AEC SPEED
7589 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7590 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7591 }
7592
7593 // AWB SPEED
7594 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7595 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7596 }
7597
Thierry Strudel3d639192016-09-09 11:52:26 -07007598 // TNR
7599 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7600 uint8_t tnr_enable = tnr->denoise_enable;
7601 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007602 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7603 int8_t is_tnr_on = 0;
7604
7605        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7606 if(is_tnr_on != curr_tnr_state) {
7607 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7608 if(is_tnr_on)
7609 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7610 else
7611 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7612 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007613
7614 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7615 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7616 }
7617
7618 // Reprocess crop data
7619 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7620 uint8_t cnt = crop_data->num_of_streams;
7621 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7622            // mm-qcamera-daemon only posts crop_data for streams
7623            // not linked to pproc, so the absence of valid crop
7624            // metadata is not necessarily an error.
7625 LOGD("No valid crop metadata entries");
7626 } else {
7627 uint32_t reproc_stream_id;
7628 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7629 LOGD("No reprocessible stream found, ignore crop data");
7630 } else {
7631 int rc = NO_ERROR;
7632 Vector<int32_t> roi_map;
7633 int32_t *crop = new int32_t[cnt*4];
7634 if (NULL == crop) {
7635 rc = NO_MEMORY;
7636 }
7637 if (NO_ERROR == rc) {
7638 int32_t streams_found = 0;
7639 for (size_t i = 0; i < cnt; i++) {
7640 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7641 if (pprocDone) {
7642 // HAL already does internal reprocessing,
7643 // either via reprocessing before JPEG encoding,
7644 // or offline postprocessing for pproc bypass case.
7645 crop[0] = 0;
7646 crop[1] = 0;
7647 crop[2] = mInputStreamInfo.dim.width;
7648 crop[3] = mInputStreamInfo.dim.height;
7649 } else {
7650 crop[0] = crop_data->crop_info[i].crop.left;
7651 crop[1] = crop_data->crop_info[i].crop.top;
7652 crop[2] = crop_data->crop_info[i].crop.width;
7653 crop[3] = crop_data->crop_info[i].crop.height;
7654 }
7655 roi_map.add(crop_data->crop_info[i].roi_map.left);
7656 roi_map.add(crop_data->crop_info[i].roi_map.top);
7657 roi_map.add(crop_data->crop_info[i].roi_map.width);
7658 roi_map.add(crop_data->crop_info[i].roi_map.height);
7659 streams_found++;
7660 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7661 crop[0], crop[1], crop[2], crop[3]);
7662 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7663 crop_data->crop_info[i].roi_map.left,
7664 crop_data->crop_info[i].roi_map.top,
7665 crop_data->crop_info[i].roi_map.width,
7666 crop_data->crop_info[i].roi_map.height);
7667 break;
7668
7669 }
7670 }
7671 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7672 &streams_found, 1);
7673 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7674 crop, (size_t)(streams_found * 4));
7675 if (roi_map.array()) {
7676 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7677 roi_map.array(), roi_map.size());
7678 }
7679 }
7680 if (crop) {
7681 delete [] crop;
7682 }
7683 }
7684 }
7685 }
7686
7687 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7688        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7689        // so hard-code the CAC result to OFF mode.
7690 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7691 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7692 } else {
7693 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7694 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7695 *cacMode);
7696 if (NAME_NOT_FOUND != val) {
7697 uint8_t resultCacMode = (uint8_t)val;
7698 // check whether CAC result from CB is equal to Framework set CAC mode
7699 // If not equal then set the CAC mode came in corresponding request
7700 if (fwk_cacMode != resultCacMode) {
7701 resultCacMode = fwk_cacMode;
7702 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007703 //Check if CAC is disabled by property
7704 if (m_cacModeDisabled) {
7705 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7706 }
7707
Thierry Strudel3d639192016-09-09 11:52:26 -07007708 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7709 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7710 } else {
7711 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7712 }
7713 }
7714 }
7715
7716 // Post blob of cam_cds_data through vendor tag.
7717 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7718 uint8_t cnt = cdsInfo->num_of_streams;
7719 cam_cds_data_t cdsDataOverride;
7720 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7721 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7722 cdsDataOverride.num_of_streams = 1;
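        // Collapse the per-stream CDS info into a single-entry blob: only the CDS enable
        // flag of the reprocessible output stream (if one exists) is copied into index 0,
        // while the session-level enable flag is forwarded as-is.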
7723 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7724 uint32_t reproc_stream_id;
7725 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7726 LOGD("No reprocessible stream found, ignore cds data");
7727 } else {
7728 for (size_t i = 0; i < cnt; i++) {
7729 if (cdsInfo->cds_info[i].stream_id ==
7730 reproc_stream_id) {
7731 cdsDataOverride.cds_info[0].cds_enable =
7732 cdsInfo->cds_info[i].cds_enable;
7733 break;
7734 }
7735 }
7736 }
7737 } else {
7738 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7739 }
7740 camMetadata.update(QCAMERA3_CDS_INFO,
7741 (uint8_t *)&cdsDataOverride,
7742 sizeof(cam_cds_data_t));
7743 }
7744
7745 // Ldaf calibration data
7746 if (!mLdafCalibExist) {
7747 IF_META_AVAILABLE(uint32_t, ldafCalib,
7748 CAM_INTF_META_LDAF_EXIF, metadata) {
7749 mLdafCalibExist = true;
7750 mLdafCalib[0] = ldafCalib[0];
7751 mLdafCalib[1] = ldafCalib[1];
7752 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7753 ldafCalib[0], ldafCalib[1]);
7754 }
7755 }
7756
Thierry Strudel54dc9782017-02-15 12:12:10 -08007757 // EXIF debug data through vendor tag
7758 /*
7759 * Mobicat Mask can assume 3 values:
7760 * 1 refers to Mobicat data,
7761 * 2 refers to Stats Debug and Exif Debug Data
7762 * 3 refers to Mobicat and Stats Debug Data
7763 * We want to make sure that we are sending Exif debug data
7764 * only when Mobicat Mask is 2.
7765 */
7766 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7767 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7768 (uint8_t *)(void *)mExifParams.debug_params,
7769 sizeof(mm_jpeg_debug_exif_params_t));
7770 }
7771
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007772 // Reprocess and DDM debug data through vendor tag
7773 cam_reprocess_info_t repro_info;
7774 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007775 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7776 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007777 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007778 }
7779 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7780 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007781 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007782 }
7783 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7784 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007785 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007786 }
7787 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7788 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007789 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007790 }
7791 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7792 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007793 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007794 }
7795 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007796 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007797 }
7798 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7799 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007800 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007801 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007802 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7803 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7804 }
7805 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7806 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7807 }
7808 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7809 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007810
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007811 // INSTANT AEC MODE
7812 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7813 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7814 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7815 }
7816
Shuzhen Wange763e802016-03-31 10:24:29 -07007817 // AF scene change
7818 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7819 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7820 }
7821
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007822 // Enable ZSL
7823 if (enableZsl != nullptr) {
7824 uint8_t value = *enableZsl ?
7825 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7826 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7827 }
7828
Thierry Strudel3d639192016-09-09 11:52:26 -07007829 resultMetadata = camMetadata.release();
7830 return resultMetadata;
7831}
7832
7833/*===========================================================================
7834 * FUNCTION : saveExifParams
7835 *
7836 * DESCRIPTION: Cache the EXIF debug parameters from the HAL metadata into mExifParams.
7837 *
7838 * PARAMETERS :
7839 * @metadata : metadata information from callback
7840 *
7841 * RETURN : none
7842 *
7843 *==========================================================================*/
7844void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7845{
7846 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7847 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7848 if (mExifParams.debug_params) {
7849 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7850 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7851 }
7852 }
7853 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7854 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7855 if (mExifParams.debug_params) {
7856 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7857 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7858 }
7859 }
7860 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7861 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7862 if (mExifParams.debug_params) {
7863 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7864 mExifParams.debug_params->af_debug_params_valid = TRUE;
7865 }
7866 }
7867 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7868 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7869 if (mExifParams.debug_params) {
7870 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7871 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7872 }
7873 }
7874 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7875 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7876 if (mExifParams.debug_params) {
7877 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7878 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7879 }
7880 }
7881 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7882 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7883 if (mExifParams.debug_params) {
7884 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7885 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7886 }
7887 }
7888 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7889 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7890 if (mExifParams.debug_params) {
7891 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7892 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7893 }
7894 }
7895 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7896 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7897 if (mExifParams.debug_params) {
7898 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7899 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7900 }
7901 }
7902}
7903
7904/*===========================================================================
7905 * FUNCTION : get3AExifParams
7906 *
7907 * DESCRIPTION: Return the cached EXIF parameters (mExifParams) collected from 3A metadata.
7908 *
7909 * PARAMETERS : none
7910 *
7911 *
7912 * RETURN : mm_jpeg_exif_params_t
7913 *
7914 *==========================================================================*/
7915mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7916{
7917 return mExifParams;
7918}
7919
7920/*===========================================================================
7921 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7922 *
7923 * DESCRIPTION: Translate urgent (partial result) metadata from the backend callback
7924 *              into framework result metadata.
7924 *
7925 * PARAMETERS :
7926 * @metadata : metadata information from callback
7927 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7928 *                             urgent metadata in a batch. Always true for
7929 *                             non-batch mode.
7930 *
7931 * RETURN : camera_metadata_t*
7932 * metadata in a format specified by fwk
7933 *==========================================================================*/
7934camera_metadata_t*
7935QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
7936        (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
7937{
7938 CameraMetadata camMetadata;
7939 camera_metadata_t *resultMetadata;
7940
7941    if (!lastUrgentMetadataInBatch) {
7942 /* In batch mode, use empty metadata if this is not the last in batch
7943 */
7944 resultMetadata = allocate_camera_metadata(0, 0);
7945 return resultMetadata;
7946 }
7947
7948 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
7949 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
7950 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
7951 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
7952 }
7953
7954 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
7955 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
7956 &aecTrigger->trigger, 1);
7957 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
7958 &aecTrigger->trigger_id, 1);
7959 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
7960 aecTrigger->trigger);
7961 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
7962 aecTrigger->trigger_id);
7963 }
7964
7965 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
7966 uint8_t fwk_ae_state = (uint8_t) *ae_state;
7967 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
7968 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
7969 }
7970
7971    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
7972 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
7973 &af_trigger->trigger, 1);
7974 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
7975 af_trigger->trigger);
7976 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
7977 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
7978 af_trigger->trigger_id);
7979 }
7980
7981    IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
7982 /*af regions*/
7983 int32_t afRegions[REGIONS_TUPLE_COUNT];
7984 // Adjust crop region from sensor output coordinate system to active
7985 // array coordinate system.
7986 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
7987 hAfRegions->rect.width, hAfRegions->rect.height);
7988
7989 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
7990 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
7991 REGIONS_TUPLE_COUNT);
7992 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7993 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
7994 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
7995 hAfRegions->rect.height);
7996 }
7997
7998    // AF region confidence
7999 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8000 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8001 }
8002
8003    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8004 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8005 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8006 if (NAME_NOT_FOUND != val) {
8007 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8008 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8009 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8010 } else {
8011 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8012 }
8013 }
8014
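    // Deduce ANDROID_CONTROL_AE_MODE from the backend red-eye, LED flash and AEC
    // fields below, in that priority order: red-eye reduction first, then
    // auto/on flash, then plain AE on/off, then external flash.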
8015 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8016 uint32_t aeMode = CAM_AE_MODE_MAX;
8017 int32_t flashMode = CAM_FLASH_MODE_MAX;
8018 int32_t redeye = -1;
8019 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8020 aeMode = *pAeMode;
8021 }
8022 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8023 flashMode = *pFlashMode;
8024 }
8025 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8026 redeye = *pRedeye;
8027 }
8028
8029 if (1 == redeye) {
8030 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8031 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8032 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8033 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8034 flashMode);
8035 if (NAME_NOT_FOUND != val) {
8036 fwk_aeMode = (uint8_t)val;
8037 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8038 } else {
8039 LOGE("Unsupported flash mode %d", flashMode);
8040 }
8041 } else if (aeMode == CAM_AE_MODE_ON) {
8042 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8043 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8044 } else if (aeMode == CAM_AE_MODE_OFF) {
8045 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8046 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8047    } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8048 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8049 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8050    } else {
8051 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8052 "flashMode:%d, aeMode:%u!!!",
8053 redeye, flashMode, aeMode);
8054 }
8055    if (mInstantAEC) {
8056        // Increment frame index count until a bound is reached for instant AEC.
8057 mInstantAecFrameIdxCount++;
8058 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8059 CAM_INTF_META_AEC_INFO, metadata) {
8060 LOGH("ae_params->settled = %d",ae_params->settled);
8061 // If AEC settled, or if number of frames reached bound value,
8062 // should reset instant AEC.
8063 if (ae_params->settled ||
8064 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8065 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8066 mInstantAEC = false;
8067 mResetInstantAEC = true;
8068 mInstantAecFrameIdxCount = 0;
8069 }
8070 }
8071 }
8072    resultMetadata = camMetadata.release();
8073 return resultMetadata;
8074}
8075
8076/*===========================================================================
8077 * FUNCTION : dumpMetadataToFile
8078 *
8079 * DESCRIPTION: Dumps tuning metadata to file system
8080 *
8081 * PARAMETERS :
8082 * @meta : tuning metadata
8083 * @dumpFrameCount : current dump frame count
8084 * @enabled : Enable mask
8085 *
8086 *==========================================================================*/
8087void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8088 uint32_t &dumpFrameCount,
8089 bool enabled,
8090 const char *type,
8091 uint32_t frameNumber)
8092{
8093 //Some sanity checks
8094 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8095 LOGE("Tuning sensor data size bigger than expected %d: %d",
8096 meta.tuning_sensor_data_size,
8097 TUNING_SENSOR_DATA_MAX);
8098 return;
8099 }
8100
8101 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8102 LOGE("Tuning VFE data size bigger than expected %d: %d",
8103 meta.tuning_vfe_data_size,
8104 TUNING_VFE_DATA_MAX);
8105 return;
8106 }
8107
8108 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8109 LOGE("Tuning CPP data size bigger than expected %d: %d",
8110 meta.tuning_cpp_data_size,
8111 TUNING_CPP_DATA_MAX);
8112 return;
8113 }
8114
8115 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8116 LOGE("Tuning CAC data size bigger than expected %d: %d",
8117 meta.tuning_cac_data_size,
8118 TUNING_CAC_DATA_MAX);
8119 return;
8120 }
8121 //
8122
8123 if(enabled){
8124 char timeBuf[FILENAME_MAX];
8125 char buf[FILENAME_MAX];
8126 memset(buf, 0, sizeof(buf));
8127 memset(timeBuf, 0, sizeof(timeBuf));
8128 time_t current_time;
8129 struct tm * timeinfo;
8130 time (&current_time);
8131 timeinfo = localtime (&current_time);
8132 if (timeinfo != NULL) {
8133 strftime (timeBuf, sizeof(timeBuf),
8134 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8135 }
8136 String8 filePath(timeBuf);
8137 snprintf(buf,
8138 sizeof(buf),
8139 "%dm_%s_%d.bin",
8140 dumpFrameCount,
8141 type,
8142 frameNumber);
8143 filePath.append(buf);
8144 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8145 if (file_fd >= 0) {
8146 ssize_t written_len = 0;
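            // Dump file layout: tuning_data_version, then five 32-bit section sizes
            // (sensor, VFE, CPP, CAC, mod3), followed by the sensor/VFE/CPP/CAC data blobs.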
8147 meta.tuning_data_version = TUNING_DATA_VERSION;
8148 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8149 written_len += write(file_fd, data, sizeof(uint32_t));
8150 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8151 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8152 written_len += write(file_fd, data, sizeof(uint32_t));
8153 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8154 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8155 written_len += write(file_fd, data, sizeof(uint32_t));
8156 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8157 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8158 written_len += write(file_fd, data, sizeof(uint32_t));
8159 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8160 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8161 written_len += write(file_fd, data, sizeof(uint32_t));
8162 meta.tuning_mod3_data_size = 0;
8163 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8164 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8165 written_len += write(file_fd, data, sizeof(uint32_t));
8166 size_t total_size = meta.tuning_sensor_data_size;
8167 data = (void *)((uint8_t *)&meta.data);
8168 written_len += write(file_fd, data, total_size);
8169 total_size = meta.tuning_vfe_data_size;
8170 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8171 written_len += write(file_fd, data, total_size);
8172 total_size = meta.tuning_cpp_data_size;
8173 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8174 written_len += write(file_fd, data, total_size);
8175 total_size = meta.tuning_cac_data_size;
8176 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8177 written_len += write(file_fd, data, total_size);
8178 close(file_fd);
8179 }else {
8180 LOGE("fail to open file for metadata dumping");
8181 }
8182 }
8183}
8184
8185/*===========================================================================
8186 * FUNCTION : cleanAndSortStreamInfo
8187 *
8188 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8189 * and sort them such that raw stream is at the end of the list
8190 *              and sort them such that raw streams are at the end of the list.
8191 *              This is a workaround for a camera daemon constraint.
8192 * PARAMETERS : None
8193 *
8194 *==========================================================================*/
8195void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8196{
8197 List<stream_info_t *> newStreamInfo;
8198
8199 /*clean up invalid streams*/
8200 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8201 it != mStreamInfo.end();) {
8202 if(((*it)->status) == INVALID){
8203 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8204 delete channel;
8205 free(*it);
8206 it = mStreamInfo.erase(it);
8207 } else {
8208 it++;
8209 }
8210 }
8211
8212 // Move preview/video/callback/snapshot streams into newList
8213 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8214 it != mStreamInfo.end();) {
8215 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8216 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8217 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8218 newStreamInfo.push_back(*it);
8219 it = mStreamInfo.erase(it);
8220 } else
8221 it++;
8222 }
8223 // Move raw streams into newList
8224 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8225 it != mStreamInfo.end();) {
8226 newStreamInfo.push_back(*it);
8227 it = mStreamInfo.erase(it);
8228 }
8229
8230 mStreamInfo = newStreamInfo;
8231}
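/* Illustration of the resulting order (hypothetical stream set, not from the HAL):
 * an mStreamInfo of {RAW16, preview YUV, JPEG snapshot} becomes
 * {preview YUV, JPEG snapshot, RAW16} after this call, with any INVALID entries freed.
 */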
8232
8233/*===========================================================================
8234 * FUNCTION : extractJpegMetadata
8235 *
8236 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8237 * JPEG metadata is cached in HAL, and return as part of capture
8238 * result when metadata is returned from camera daemon.
8239 *
8240 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8241 * @request: capture request
8242 *
8243 *==========================================================================*/
8244void QCamera3HardwareInterface::extractJpegMetadata(
8245 CameraMetadata& jpegMetadata,
8246 const camera3_capture_request_t *request)
8247{
8248 CameraMetadata frame_settings;
8249 frame_settings = request->settings;
8250
8251 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8252 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8253 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8254 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8255
8256 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8257 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8258 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8259 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8260
8261 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8262 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8263 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8264 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8265
8266 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8267 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8268 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8269 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8270
8271 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8272 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8273 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8274 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8275
8276 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8277 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8278 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8279 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8280
8281 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8282 int32_t thumbnail_size[2];
8283 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8284 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8285 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8286 int32_t orientation =
8287 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
8288            if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
8289               //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8290 int32_t temp;
8291 temp = thumbnail_size[0];
8292 thumbnail_size[0] = thumbnail_size[1];
8293 thumbnail_size[1] = temp;
8294 }
8295 }
8296 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8297 thumbnail_size,
8298 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8299 }
8300
8301}
8302
8303/*===========================================================================
8304 * FUNCTION : convertToRegions
8305 *
8306 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8307 *
8308 * PARAMETERS :
8309 * @rect : cam_rect_t struct to convert
8310 * @region : int32_t destination array
8311 * @weight : if we are converting from cam_area_t, weight is valid
8312 * else weight = -1
8313 *
8314 *==========================================================================*/
8315void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8316 int32_t *region, int weight)
8317{
8318 region[0] = rect.left;
8319 region[1] = rect.top;
8320 region[2] = rect.left + rect.width;
8321 region[3] = rect.top + rect.height;
8322 if (weight > -1) {
8323 region[4] = weight;
8324 }
8325}
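/* Minimal usage sketch (values assumed for illustration, not taken from the HAL;
 * cam_rect_t is brace-initialized here as left, top, width, height):
 *   cam_rect_t rect = {100, 200, 300, 400};
 *   int32_t region[REGIONS_TUPLE_COUNT];
 *   convertToRegions(rect, region, 50);
 *   // region == {100, 200, 400, 600, 50}, i.e. [x_min, y_min, x_max, y_max, weight]
 */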
8326
8327/*===========================================================================
8328 * FUNCTION : convertFromRegions
8329 *
8330 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8331 *
8332 * PARAMETERS :
8333 *   @roi : cam_area_t destination struct
8334 *   @frame_settings : capture request settings containing the region tag
8335 *   @tag : metadata tag whose data is [x_min, y_min, x_max, y_max, weight]
8336 *
8337 *
8338 *==========================================================================*/
8339void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
8340        const CameraMetadata &frame_settings, uint32_t tag)
8341{
8342    int32_t x_min = frame_settings.find(tag).data.i32[0];
8343 int32_t y_min = frame_settings.find(tag).data.i32[1];
8344 int32_t x_max = frame_settings.find(tag).data.i32[2];
8345 int32_t y_max = frame_settings.find(tag).data.i32[3];
8346 roi.weight = frame_settings.find(tag).data.i32[4];
8347 roi.rect.left = x_min;
8348 roi.rect.top = y_min;
8349 roi.rect.width = x_max - x_min;
8350 roi.rect.height = y_max - y_min;
8351}
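/* Illustration (assumed request values): a request carrying
 * ANDROID_CONTROL_AE_REGIONS = {0, 0, 4000, 3000, 1} yields roi.weight = 1 and
 * roi.rect = {0, 0, 4000, 3000}, with width/height derived as (x_max - x_min, y_max - y_min).
 */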
8352
8353/*===========================================================================
8354 * FUNCTION : resetIfNeededROI
8355 *
8356 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8357 * crop region
8358 *
8359 * PARAMETERS :
8360 * @roi : cam_area_t struct to resize
8361 * @scalerCropRegion : cam_crop_region_t region to compare against
8362 *
8363 *
8364 *==========================================================================*/
8365bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8366 const cam_crop_region_t* scalerCropRegion)
8367{
8368 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8369 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8370 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8371 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8372
8373    /* According to the spec, weight = 0 indicates the roi should be disabled.
8374     * Without this check, the validation below (whether the roi lies inside the
8375     * scaler crop region) would fail, the roi would not be reset, and the
8376     * algorithm would keep using a stale roi window.
8377     */
8378 if (roi->weight == 0) {
8379 return true;
8380 }
8381
8382 if ((roi_x_max < scalerCropRegion->left) ||
8383 // right edge of roi window is left of scalar crop's left edge
8384 (roi_y_max < scalerCropRegion->top) ||
8385 // bottom edge of roi window is above scalar crop's top edge
8386 (roi->rect.left > crop_x_max) ||
8387 // left edge of roi window is beyond(right) of scalar crop's right edge
8388 (roi->rect.top > crop_y_max)){
8389            // top edge of roi window is below scalar crop's bottom edge
8390 return false;
8391 }
8392 if (roi->rect.left < scalerCropRegion->left) {
8393 roi->rect.left = scalerCropRegion->left;
8394 }
8395 if (roi->rect.top < scalerCropRegion->top) {
8396 roi->rect.top = scalerCropRegion->top;
8397 }
8398 if (roi_x_max > crop_x_max) {
8399 roi_x_max = crop_x_max;
8400 }
8401 if (roi_y_max > crop_y_max) {
8402 roi_y_max = crop_y_max;
8403 }
8404 roi->rect.width = roi_x_max - roi->rect.left;
8405 roi->rect.height = roi_y_max - roi->rect.top;
8406 return true;
8407}
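/* Illustration (assumed values): with a scaler crop of {200, 150, 3600, 2700} and an ROI of
 * {0, 0, 1000, 1000} (weight 1), the ROI is clamped to {200, 150, 800, 850} and true is
 * returned; an ROI entirely outside the crop returns false; weight == 0 returns true
 * without modifying the ROI.
 */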
8408
8409/*===========================================================================
8410 * FUNCTION : convertLandmarks
8411 *
8412 * DESCRIPTION: helper method to extract the landmarks from face detection info
8413 *
8414 * PARAMETERS :
8415 * @landmark_data : input landmark data to be converted
8416 * @landmarks : int32_t destination array
8417 *
8418 *
8419 *==========================================================================*/
8420void QCamera3HardwareInterface::convertLandmarks(
8421 cam_face_landmarks_info_t landmark_data,
8422 int32_t *landmarks)
8423{
8424    if (landmark_data.is_left_eye_valid) {
8425 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8426 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8427 } else {
8428 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8429 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8430 }
8431
8432 if (landmark_data.is_right_eye_valid) {
8433 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8434 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8435 } else {
8436 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8437 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8438 }
8439
8440 if (landmark_data.is_mouth_valid) {
8441 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8442 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8443 } else {
8444 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8445 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8446 }
8447}
8448
8449/*===========================================================================
8450 * FUNCTION : setInvalidLandmarks
8451 *
8452 * DESCRIPTION: helper method to set invalid landmarks
8453 *
8454 * PARAMETERS :
8455 * @landmarks : int32_t destination array
8456 *
8457 *
8458 *==========================================================================*/
8459void QCamera3HardwareInterface::setInvalidLandmarks(
8460 int32_t *landmarks)
8461{
8462 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8463 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8464 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8465 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8466 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8467 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8468}
8469
8470#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
8471
8472/*===========================================================================
8473 * FUNCTION : getCapabilities
8474 *
8475 * DESCRIPTION: query camera capability from back-end
8476 *
8477 * PARAMETERS :
8478 * @ops : mm-interface ops structure
8479 * @cam_handle : camera handle for which we need capability
8480 *
8481 * RETURN : ptr type of capability structure
8482 * capability for success
8483 * NULL for failure
8484 *==========================================================================*/
8485cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8486 uint32_t cam_handle)
8487{
8488 int rc = NO_ERROR;
8489 QCamera3HeapMemory *capabilityHeap = NULL;
8490 cam_capability_t *cap_ptr = NULL;
8491
8492 if (ops == NULL) {
8493 LOGE("Invalid arguments");
8494 return NULL;
8495 }
8496
8497 capabilityHeap = new QCamera3HeapMemory(1);
8498 if (capabilityHeap == NULL) {
8499 LOGE("creation of capabilityHeap failed");
8500 return NULL;
8501 }
8502
8503 /* Allocate memory for capability buffer */
8504 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8505 if(rc != OK) {
8506        LOGE("No memory for capability");
8507 goto allocate_failed;
8508 }
8509
8510 /* Map memory for capability buffer */
8511 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8512
8513 rc = ops->map_buf(cam_handle,
8514 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8515 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8516 if(rc < 0) {
8517 LOGE("failed to map capability buffer");
8518 rc = FAILED_TRANSACTION;
8519 goto map_failed;
8520 }
8521
8522 /* Query Capability */
8523 rc = ops->query_capability(cam_handle);
8524 if(rc < 0) {
8525 LOGE("failed to query capability");
8526 rc = FAILED_TRANSACTION;
8527 goto query_failed;
8528 }
8529
8530 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8531 if (cap_ptr == NULL) {
8532 LOGE("out of memory");
8533 rc = NO_MEMORY;
8534 goto query_failed;
8535 }
8536
8537 memset(cap_ptr, 0, sizeof(cam_capability_t));
8538 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8539
8540 int index;
8541 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8542 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8543 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8544 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8545 }
8546
8547query_failed:
8548 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8549map_failed:
8550 capabilityHeap->deallocate();
8551allocate_failed:
8552 delete capabilityHeap;
8553
8554 if (rc != NO_ERROR) {
8555 return NULL;
8556 } else {
8557 return cap_ptr;
8558 }
8559}
8560
8561/*===========================================================================
8562 * FUNCTION : initCapabilities
8563 *
8564 * DESCRIPTION: initialize camera capabilities in static data struct
8565 *
8566 * PARAMETERS :
8567 * @cameraId : camera Id
8568 *
8569 * RETURN : int32_t type of status
8570 * NO_ERROR -- success
8571 * none-zero failure code
8572 *==========================================================================*/
8573int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8574{
8575 int rc = 0;
8576 mm_camera_vtbl_t *cameraHandle = NULL;
8577    uint32_t handle = 0;
8578
8579 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8580 if (rc) {
8581 LOGE("camera_open failed. rc = %d", rc);
8582 goto open_failed;
8583 }
8584 if (!cameraHandle) {
8585 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8586 goto open_failed;
8587 }
8588
8589    handle = get_main_camera_handle(cameraHandle->camera_handle);
8590 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8591 if (gCamCapability[cameraId] == NULL) {
8592 rc = FAILED_TRANSACTION;
8593 goto failed_op;
8594    }
8595
8596    gCamCapability[cameraId]->camera_index = cameraId;
8597    if (is_dual_camera_by_idx(cameraId)) {
8598 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8599 gCamCapability[cameraId]->aux_cam_cap =
8600 getCapabilities(cameraHandle->ops, handle);
8601 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8602 rc = FAILED_TRANSACTION;
8603 free(gCamCapability[cameraId]);
8604 goto failed_op;
8605 }
8606
8607 // Copy the main camera capability to main_cam_cap struct
8608 gCamCapability[cameraId]->main_cam_cap =
8609 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8610 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8611 LOGE("out of memory");
8612 rc = NO_MEMORY;
8613 goto failed_op;
8614 }
8615 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8616 sizeof(cam_capability_t));
8617    }
8618failed_op:
8619    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8620 cameraHandle = NULL;
8621open_failed:
8622 return rc;
8623}
8624
8625/*==========================================================================
8626 * FUNCTION : get3AVersion
8627 *
8628 * DESCRIPTION: get the Q3A S/W version
8629 *
8630 * PARAMETERS :
8631 * @sw_version: Reference of Q3A structure which will hold version info upon
8632 * return
8633 *
8634 * RETURN : None
8635 *
8636 *==========================================================================*/
8637void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8638{
8639 if(gCamCapability[mCameraId])
8640 sw_version = gCamCapability[mCameraId]->q3a_version;
8641 else
8642 LOGE("Capability structure NULL!");
8643}
8644
8645
8646/*===========================================================================
8647 * FUNCTION : initParameters
8648 *
8649 * DESCRIPTION: initialize camera parameters
8650 *
8651 * PARAMETERS :
8652 *
8653 * RETURN : int32_t type of status
8654 * NO_ERROR -- success
8655 * none-zero failure code
8656 *==========================================================================*/
8657int QCamera3HardwareInterface::initParameters()
8658{
8659 int rc = 0;
8660
8661 //Allocate Set Param Buffer
8662 mParamHeap = new QCamera3HeapMemory(1);
8663 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8664 if(rc != OK) {
8665 rc = NO_MEMORY;
8666 LOGE("Failed to allocate SETPARM Heap memory");
8667 delete mParamHeap;
8668 mParamHeap = NULL;
8669 return rc;
8670 }
8671
8672 //Map memory for parameters buffer
8673 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8674 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8675 mParamHeap->getFd(0),
8676 sizeof(metadata_buffer_t),
8677 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8678 if(rc < 0) {
8679 LOGE("failed to map SETPARM buffer");
8680 rc = FAILED_TRANSACTION;
8681 mParamHeap->deallocate();
8682 delete mParamHeap;
8683 mParamHeap = NULL;
8684 return rc;
8685 }
8686
8687 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8688
8689 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8690 return rc;
8691}
8692
8693/*===========================================================================
8694 * FUNCTION : deinitParameters
8695 *
8696 * DESCRIPTION: de-initialize camera parameters
8697 *
8698 * PARAMETERS :
8699 *
8700 * RETURN : NONE
8701 *==========================================================================*/
8702void QCamera3HardwareInterface::deinitParameters()
8703{
8704 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8705 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8706
8707 mParamHeap->deallocate();
8708 delete mParamHeap;
8709 mParamHeap = NULL;
8710
8711 mParameters = NULL;
8712
8713 free(mPrevParameters);
8714 mPrevParameters = NULL;
8715}
8716
8717/*===========================================================================
8718 * FUNCTION : calcMaxJpegSize
8719 *
8720 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8721 *
8722 * PARAMETERS :
8723 *
8724 * RETURN : max_jpeg_size
8725 *==========================================================================*/
8726size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8727{
8728 size_t max_jpeg_size = 0;
8729 size_t temp_width, temp_height;
8730 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8731 MAX_SIZES_CNT);
8732 for (size_t i = 0; i < count; i++) {
8733 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8734 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8735 if (temp_width * temp_height > max_jpeg_size ) {
8736 max_jpeg_size = temp_width * temp_height;
8737 }
8738 }
8739 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8740 return max_jpeg_size;
8741}
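/* Worked example (assumed sensor): a 4000x3000 maximum picture size gives
 * 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t) = 18000000 bytes plus the blob header.
 */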
8742
8743/*===========================================================================
8744 * FUNCTION : getMaxRawSize
8745 *
8746 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8747 *
8748 * PARAMETERS :
8749 *
8750 * RETURN : Largest supported Raw Dimension
8751 *==========================================================================*/
8752cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8753{
8754 int max_width = 0;
8755 cam_dimension_t maxRawSize;
8756
8757 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8758 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8759 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8760 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8761 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8762 }
8763 }
8764 return maxRawSize;
8765}
8766
8767
8768/*===========================================================================
8769 * FUNCTION : calcMaxJpegDim
8770 *
8771 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8772 *
8773 * PARAMETERS :
8774 *
8775 * RETURN : max_jpeg_dim
8776 *==========================================================================*/
8777cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8778{
8779 cam_dimension_t max_jpeg_dim;
8780 cam_dimension_t curr_jpeg_dim;
8781 max_jpeg_dim.width = 0;
8782 max_jpeg_dim.height = 0;
8783 curr_jpeg_dim.width = 0;
8784 curr_jpeg_dim.height = 0;
8785 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8786 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8787 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8788 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8789 max_jpeg_dim.width * max_jpeg_dim.height ) {
8790 max_jpeg_dim.width = curr_jpeg_dim.width;
8791 max_jpeg_dim.height = curr_jpeg_dim.height;
8792 }
8793 }
8794 return max_jpeg_dim;
8795}
8796
8797/*===========================================================================
8798 * FUNCTION : addStreamConfig
8799 *
8800 * DESCRIPTION: adds the stream configuration to the array
8801 *
8802 * PARAMETERS :
8803 * @available_stream_configs : pointer to stream configuration array
8804 * @scalar_format : scalar format
8805 * @dim : configuration dimension
8806 * @config_type : input or output configuration type
8807 *
8808 * RETURN : NONE
8809 *==========================================================================*/
8810void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8811 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8812{
8813 available_stream_configs.add(scalar_format);
8814 available_stream_configs.add(dim.width);
8815 available_stream_configs.add(dim.height);
8816 available_stream_configs.add(config_type);
8817}
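// Each call appends one (format, width, height, direction) tuple, matching the layout
// expected by ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.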
8818
8819/*===========================================================================
8820 * FUNCTION : supportBurstCapture
8821 *
8822 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8823 *
8824 * PARAMETERS :
8825 * @cameraId : camera Id
8826 *
8827 * RETURN : true if camera supports BURST_CAPTURE
8828 * false otherwise
8829 *==========================================================================*/
8830bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8831{
8832 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8833 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8834 const int32_t highResWidth = 3264;
8835 const int32_t highResHeight = 2448;
8836
8837 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8838 // Maximum resolution images cannot be captured at >= 10fps
8839 // -> not supporting BURST_CAPTURE
8840 return false;
8841 }
8842
8843 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8844 // Maximum resolution images can be captured at >= 20fps
8845 // --> supporting BURST_CAPTURE
8846 return true;
8847 }
8848
8849 // Find the smallest highRes resolution, or largest resolution if there is none
8850 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8851 MAX_SIZES_CNT);
8852 size_t highRes = 0;
8853 while ((highRes + 1 < totalCnt) &&
8854 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8855 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8856 highResWidth * highResHeight)) {
8857 highRes++;
8858 }
8859 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8860 return true;
8861 } else {
8862 return false;
8863 }
8864}
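/* Illustration (assumed timings): a sensor whose full-resolution min duration is 83ms (12 fps)
 * but whose smallest size >= 3264x2448 runs at 41ms (24 fps) still reports BURST_CAPTURE,
 * since only that high-resolution size has to meet the 50ms bound.
 */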
8865
8866/*===========================================================================
8867 * FUNCTION : getPDStatIndex
8868 *
8869 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8870 *
8871 * PARAMETERS :
8872 * @caps : camera capabilities
8873 *
8874 * RETURN : int32_t type
8875 * non-negative - on success
8876 * -1 - on failure
8877 *==========================================================================*/
8878int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8879 if (nullptr == caps) {
8880 return -1;
8881 }
8882
8883 uint32_t metaRawCount = caps->meta_raw_channel_count;
8884 int32_t ret = -1;
8885 for (size_t i = 0; i < metaRawCount; i++) {
8886 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8887 ret = i;
8888 break;
8889 }
8890 }
8891
8892 return ret;
8893}
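// e.g. returns 1 when sub_fmt[1] is the PDAF stats channel; returns -1 when no meta raw
// channel carries PDAF statistics.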
8894
8895/*===========================================================================
8896 * FUNCTION : initStaticMetadata
8897 *
8898 * DESCRIPTION: initialize the static metadata
8899 *
8900 * PARAMETERS :
8901 * @cameraId : camera Id
8902 *
8903 * RETURN : int32_t type of status
8904 * 0 -- success
8905 * non-zero failure code
8906 *==========================================================================*/
8907int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8908{
8909 int rc = 0;
8910 CameraMetadata staticInfo;
8911 size_t count = 0;
8912 bool limitedDevice = false;
8913 char prop[PROPERTY_VALUE_MAX];
8914 bool supportBurst = false;
8915
8916 supportBurst = supportBurstCapture(cameraId);
8917
8918 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8919    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
8920     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
8921     * advertised as a limited device */
8922 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8923 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8924 !supportBurst;
8925
8926 uint8_t supportedHwLvl = limitedDevice ?
8927 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
8928#ifndef USE_HAL_3_3
8929            // LEVEL_3 - This device will support level 3.
8930            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8931#else
8932            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
8933#endif
8934
8935 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8936 &supportedHwLvl, 1);
8937
8938 bool facingBack = false;
8939 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8940 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8941 facingBack = true;
8942 }
8943 /*HAL 3 only*/
8944 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8945 &gCamCapability[cameraId]->min_focus_distance, 1);
8946
8947 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8948 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8949
8950 /*should be using focal lengths but sensor doesn't provide that info now*/
8951 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8952 &gCamCapability[cameraId]->focal_length,
8953 1);
8954
8955 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8956 gCamCapability[cameraId]->apertures,
8957 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8958
8959 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8960 gCamCapability[cameraId]->filter_densities,
8961 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8962
8963
8964    uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
8965 size_t mode_count =
8966 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
8967 for (size_t i = 0; i < mode_count; i++) {
8968 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
8969 }
8970    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8971            available_opt_stab_modes, mode_count);
8972
8973 int32_t lens_shading_map_size[] = {
8974 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8975 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8976 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8977 lens_shading_map_size,
8978 sizeof(lens_shading_map_size)/sizeof(int32_t));
8979
8980 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8981 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8982
8983 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8984 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8985
8986 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8987 &gCamCapability[cameraId]->max_frame_duration, 1);
8988
8989 camera_metadata_rational baseGainFactor = {
8990 gCamCapability[cameraId]->base_gain_factor.numerator,
8991 gCamCapability[cameraId]->base_gain_factor.denominator};
8992 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8993 &baseGainFactor, 1);
8994
8995 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8996 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8997
8998 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8999 gCamCapability[cameraId]->pixel_array_size.height};
9000 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9001 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9002
9003 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9004 gCamCapability[cameraId]->active_array_size.top,
9005 gCamCapability[cameraId]->active_array_size.width,
9006 gCamCapability[cameraId]->active_array_size.height};
9007 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9008 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9009
9010 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9011 &gCamCapability[cameraId]->white_level, 1);
9012
9013    int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9014 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9015 gCamCapability[cameraId]->color_arrangement);
9016    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
9017            adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
9018
9019#ifndef USE_HAL_3_3
9020 bool hasBlackRegions = false;
9021 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9022 LOGW("black_region_count: %d is bounded to %d",
9023 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9024 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9025 }
9026 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9027 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9028 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9029 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9030 }
9031 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9032 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9033 hasBlackRegions = true;
9034 }
9035#endif
9036    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9037 &gCamCapability[cameraId]->flash_charge_duration, 1);
9038
9039 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9040 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9041
9042    uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9043 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9044 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
9045    staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9046 &timestampSource, 1);
9047
9048    //update histogram vendor data
9049    staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
9050            &gCamCapability[cameraId]->histogram_size, 1);
9051
9052    staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
9053            &gCamCapability[cameraId]->max_histogram_count, 1);
9054
9055    //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9056 //so that app can request fewer number of bins than the maximum supported.
9057 std::vector<int32_t> histBins;
9058 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9059 histBins.push_back(maxHistBins);
9060 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9061 (maxHistBins & 0x1) == 0) {
9062 histBins.push_back(maxHistBins >> 1);
9063 maxHistBins >>= 1;
9064 }
9065 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9066 histBins.data(), histBins.size());
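    // e.g. a maximum of 256 bins with a minimum stats size of 64 (assumed values)
    // advertises the supported bin counts {256, 128, 64}.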
9067
9068    int32_t sharpness_map_size[] = {
9069 gCamCapability[cameraId]->sharpness_map_size.width,
9070 gCamCapability[cameraId]->sharpness_map_size.height};
9071
9072 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9073 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9074
9075 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9076 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9077
9078    int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9079 if (0 <= indexPD) {
9080 // Advertise PD stats data as part of the Depth capabilities
9081 int32_t depthWidth =
9082 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9083 int32_t depthHeight =
9084 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9085 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9086 assert(0 < depthSamplesCount);
9087 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9088 &depthSamplesCount, 1);
9089
9090 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9091 depthHeight,
9092 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9093 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9094 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9095 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9096 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9097
9098 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9099 depthHeight, 33333333,
9100 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9101 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9102 depthMinDuration,
9103 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9104
9105 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9106 depthHeight, 0,
9107 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9108 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9109 depthStallDuration,
9110 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9111
9112 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9113 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9114 }
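    // e.g. a 496x1344 PDAF stats buffer (assumed dimensions) advertises
    // (496 * 1344 * 2) / 16 = 83328 depth samples.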
9115
9116    int32_t scalar_formats[] = {
9117 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9118 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9119 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9120 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9121 HAL_PIXEL_FORMAT_RAW10,
9122 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
9123    size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9124 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9125 scalar_formats_count);
9126
9127 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9128 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9129 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9130 count, MAX_SIZES_CNT, available_processed_sizes);
9131 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9132 available_processed_sizes, count * 2);
9133
9134 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9135 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9136 makeTable(gCamCapability[cameraId]->raw_dim,
9137 count, MAX_SIZES_CNT, available_raw_sizes);
9138 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9139 available_raw_sizes, count * 2);
9140
9141 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9142 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9143 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9144 count, MAX_SIZES_CNT, available_fps_ranges);
9145 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9146 available_fps_ranges, count * 2);
9147
9148 camera_metadata_rational exposureCompensationStep = {
9149 gCamCapability[cameraId]->exp_compensation_step.numerator,
9150 gCamCapability[cameraId]->exp_compensation_step.denominator};
9151 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9152 &exposureCompensationStep, 1);
9153
9154 Vector<uint8_t> availableVstabModes;
9155 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9156 char eis_prop[PROPERTY_VALUE_MAX];
9157    bool eisSupported = false;
9158    memset(eis_prop, 0, sizeof(eis_prop));
9159    property_get("persist.camera.eis.enable", eis_prop, "1");
9160    uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
9161    count = IS_TYPE_MAX;
9162 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9163 for (size_t i = 0; i < count; i++) {
9164 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9165 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9166 eisSupported = true;
9167 break;
9168 }
9169 }
9170 if (facingBack && eis_prop_set && eisSupported) {
9171        availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9172 }
9173 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9174 availableVstabModes.array(), availableVstabModes.size());
9175
9176 /*HAL 1 and HAL 3 common*/
9177 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9178 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9179 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
9180    // Cap the max zoom to the max preferred value
9181 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
9182    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9183 &maxZoom, 1);
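    // e.g. a zoom ratio table ending at 800 (assumed) gives 800 / 100 = 8x, which is
    // then capped to MAX_PREFERRED_ZOOM_RATIO if that is smaller.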
9184
9185 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9186 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9187
9188 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9189 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9190 max3aRegions[2] = 0; /* AF not supported */
9191 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9192 max3aRegions, 3);
9193
9194 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9195 memset(prop, 0, sizeof(prop));
9196 property_get("persist.camera.facedetect", prop, "1");
9197 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9198 LOGD("Support face detection mode: %d",
9199 supportedFaceDetectMode);
9200
9201 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9202    /* supported mode should be OFF if the max number of faces is 0 */
9203 if (maxFaces <= 0) {
9204 supportedFaceDetectMode = 0;
9205 }
9206    Vector<uint8_t> availableFaceDetectModes;
9207 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9208 if (supportedFaceDetectMode == 1) {
9209 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9210 } else if (supportedFaceDetectMode == 2) {
9211 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9212 } else if (supportedFaceDetectMode == 3) {
9213 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9214 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9215 } else {
9216 maxFaces = 0;
9217 }
9218 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9219 availableFaceDetectModes.array(),
9220 availableFaceDetectModes.size());
9221 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9222 (int32_t *)&maxFaces, 1);
9223    uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9224 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9225 &face_bsgc, 1);
9226
9227 int32_t exposureCompensationRange[] = {
9228 gCamCapability[cameraId]->exposure_compensation_min,
9229 gCamCapability[cameraId]->exposure_compensation_max};
9230 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9231 exposureCompensationRange,
9232 sizeof(exposureCompensationRange)/sizeof(int32_t));
9233
9234 uint8_t lensFacing = (facingBack) ?
9235 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9236 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9237
9238 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9239 available_thumbnail_sizes,
9240 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9241
9242 /*all sizes will be clubbed into this tag*/
9243 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9244 /*android.scaler.availableStreamConfigurations*/
9245 Vector<int32_t> available_stream_configs;
9246 cam_dimension_t active_array_dim;
9247 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9248 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
9249
9250    /* Advertise the list of supported input dimensions based on the property below.
9251       By default all sizes up to 5MP will be advertised.
9252       Note that the setprop resolution format should be WxH,
9253       e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9254       To list all supported sizes, the setprop needs to be set to "0x0" */
9255 cam_dimension_t minInputSize = {2592,1944}; //5MP
9256 memset(prop, 0, sizeof(prop));
9257 property_get("persist.camera.input.minsize", prop, "2592x1944");
9258 if (strlen(prop) > 0) {
9259 char *saveptr = NULL;
9260 char *token = strtok_r(prop, "x", &saveptr);
9261 if (token != NULL) {
9262 minInputSize.width = atoi(token);
9263 }
9264 token = strtok_r(NULL, "x", &saveptr);
9265 if (token != NULL) {
9266 minInputSize.height = atoi(token);
9267 }
9268 }
9269
9270    /* Add input/output stream configurations for each scalar format */
9271 for (size_t j = 0; j < scalar_formats_count; j++) {
9272 switch (scalar_formats[j]) {
9273 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9274 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9275 case HAL_PIXEL_FORMAT_RAW10:
9276 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9277 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9278 addStreamConfig(available_stream_configs, scalar_formats[j],
9279 gCamCapability[cameraId]->raw_dim[i],
9280 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9281 }
9282 break;
9283 case HAL_PIXEL_FORMAT_BLOB:
9284 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9285 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9286 addStreamConfig(available_stream_configs, scalar_formats[j],
9287 gCamCapability[cameraId]->picture_sizes_tbl[i],
9288 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9289 }
9290 break;
9291 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9292 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9293 default:
9294 cam_dimension_t largest_picture_size;
9295 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9296 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9297 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9298 addStreamConfig(available_stream_configs, scalar_formats[j],
9299 gCamCapability[cameraId]->picture_sizes_tbl[i],
9300 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009301                /* For the below 2 formats we also support input streams for reprocessing; advertise those as well */
9302 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9303 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9304 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9305 >= minInputSize.width) || (gCamCapability[cameraId]->
9306 picture_sizes_tbl[i].height >= minInputSize.height)) {
9307 addStreamConfig(available_stream_configs, scalar_formats[j],
9308 gCamCapability[cameraId]->picture_sizes_tbl[i],
9309 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9310 }
9311 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009312 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009313
Thierry Strudel3d639192016-09-09 11:52:26 -07009314 break;
9315 }
9316 }
9317
9318 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9319 available_stream_configs.array(), available_stream_configs.size());
9320 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9321 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9322
9323 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9324 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9325
9326 /* android.scaler.availableMinFrameDurations */
9327 Vector<int64_t> available_min_durations;
9328 for (size_t j = 0; j < scalar_formats_count; j++) {
9329 switch (scalar_formats[j]) {
9330 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9331 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9332 case HAL_PIXEL_FORMAT_RAW10:
9333 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9334 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9335 available_min_durations.add(scalar_formats[j]);
9336 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9337 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9338 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9339 }
9340 break;
9341 default:
9342 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9343 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9344 available_min_durations.add(scalar_formats[j]);
9345 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9346 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9347 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9348 }
9349 break;
9350 }
9351 }
9352 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9353 available_min_durations.array(), available_min_durations.size());
9354
9355 Vector<int32_t> available_hfr_configs;
9356 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9357 int32_t fps = 0;
9358 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9359 case CAM_HFR_MODE_60FPS:
9360 fps = 60;
9361 break;
9362 case CAM_HFR_MODE_90FPS:
9363 fps = 90;
9364 break;
9365 case CAM_HFR_MODE_120FPS:
9366 fps = 120;
9367 break;
9368 case CAM_HFR_MODE_150FPS:
9369 fps = 150;
9370 break;
9371 case CAM_HFR_MODE_180FPS:
9372 fps = 180;
9373 break;
9374 case CAM_HFR_MODE_210FPS:
9375 fps = 210;
9376 break;
9377 case CAM_HFR_MODE_240FPS:
9378 fps = 240;
9379 break;
9380 case CAM_HFR_MODE_480FPS:
9381 fps = 480;
9382 break;
9383 case CAM_HFR_MODE_OFF:
9384 case CAM_HFR_MODE_MAX:
9385 default:
9386 break;
9387 }
9388
9389 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9390 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9391 /* For each HFR frame rate, need to advertise one variable fps range
9392 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9393             * and [120, 120]. While camcorder preview alone is running, the app
9394             * sets [30, 120]. When video recording starts, [120, 120] is set
9395             * instead. This way the sensor configuration does not change when
9396             * recording starts. */
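            /* Illustrative sketch of the entries produced below (values hypothetical,
             * assuming PREVIEW_FPS_FOR_HFR is 30): a 1920x1080 table entry at 120 FPS
             * would contribute (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4),
             * where the last element is the batch size, fps / PREVIEW_FPS_FOR_HFR. */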
9397
9398 /* (width, height, fps_min, fps_max, batch_size_max) */
9399 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9400 j < MAX_SIZES_CNT; j++) {
9401 available_hfr_configs.add(
9402 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9403 available_hfr_configs.add(
9404 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9405 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9406 available_hfr_configs.add(fps);
9407 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9408
9409 /* (width, height, fps_min, fps_max, batch_size_max) */
9410 available_hfr_configs.add(
9411 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9412 available_hfr_configs.add(
9413 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9414 available_hfr_configs.add(fps);
9415 available_hfr_configs.add(fps);
9416 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9417 }
9418 }
9419 }
9420 //Advertise HFR capability only if the property is set
9421 memset(prop, 0, sizeof(prop));
9422 property_get("persist.camera.hal3hfr.enable", prop, "1");
9423 uint8_t hfrEnable = (uint8_t)atoi(prop);
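    // Hypothetical usage to hide the HIGH_SPEED configurations (mirrors the earlier
    // setprop example): adb shell setprop persist.camera.hal3hfr.enable 0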
9424
9425 if(hfrEnable && available_hfr_configs.array()) {
9426 staticInfo.update(
9427 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9428 available_hfr_configs.array(), available_hfr_configs.size());
9429 }
9430
9431 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9432 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9433 &max_jpeg_size, 1);
9434
9435 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9436 size_t size = 0;
9437 count = CAM_EFFECT_MODE_MAX;
9438 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9439 for (size_t i = 0; i < count; i++) {
9440 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9441 gCamCapability[cameraId]->supported_effects[i]);
9442 if (NAME_NOT_FOUND != val) {
9443 avail_effects[size] = (uint8_t)val;
9444 size++;
9445 }
9446 }
9447 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9448 avail_effects,
9449 size);
9450
9451 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9452 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9453 size_t supported_scene_modes_cnt = 0;
9454 count = CAM_SCENE_MODE_MAX;
9455 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9456 for (size_t i = 0; i < count; i++) {
9457 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9458 CAM_SCENE_MODE_OFF) {
9459 int val = lookupFwkName(SCENE_MODES_MAP,
9460 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9461 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009462
Thierry Strudel3d639192016-09-09 11:52:26 -07009463 if (NAME_NOT_FOUND != val) {
9464 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9465 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9466 supported_scene_modes_cnt++;
9467 }
9468 }
9469 }
9470 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9471 avail_scene_modes,
9472 supported_scene_modes_cnt);
9473
9474 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9475 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9476 supported_scene_modes_cnt,
9477 CAM_SCENE_MODE_MAX,
9478 scene_mode_overrides,
9479 supported_indexes,
9480 cameraId);
9481
9482 if (supported_scene_modes_cnt == 0) {
9483 supported_scene_modes_cnt = 1;
9484 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9485 }
9486
9487 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9488 scene_mode_overrides, supported_scene_modes_cnt * 3);
9489
9490 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9491 ANDROID_CONTROL_MODE_AUTO,
9492 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9493 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9494 available_control_modes,
9495 3);
9496
9497 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9498 size = 0;
9499 count = CAM_ANTIBANDING_MODE_MAX;
9500 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9501 for (size_t i = 0; i < count; i++) {
9502 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9503 gCamCapability[cameraId]->supported_antibandings[i]);
9504 if (NAME_NOT_FOUND != val) {
9505 avail_antibanding_modes[size] = (uint8_t)val;
9506 size++;
9507 }
9508
9509 }
9510 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9511 avail_antibanding_modes,
9512 size);
9513
9514 uint8_t avail_abberation_modes[] = {
9515 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9516 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9517 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9518 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9519 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9520 if (0 == count) {
9521        // If no aberration correction modes are available for a device, advertise only the OFF mode
9522 size = 1;
9523 } else {
9524        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9525        // So, advertise all 3 modes if at least one mode is supported, as per the
9526        // new M requirement.
9527 size = 3;
9528 }
9529 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9530 avail_abberation_modes,
9531 size);
9532
9533 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9534 size = 0;
9535 count = CAM_FOCUS_MODE_MAX;
9536 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9537 for (size_t i = 0; i < count; i++) {
9538 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9539 gCamCapability[cameraId]->supported_focus_modes[i]);
9540 if (NAME_NOT_FOUND != val) {
9541 avail_af_modes[size] = (uint8_t)val;
9542 size++;
9543 }
9544 }
9545 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9546 avail_af_modes,
9547 size);
9548
9549 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9550 size = 0;
9551 count = CAM_WB_MODE_MAX;
9552 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9553 for (size_t i = 0; i < count; i++) {
9554 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9555 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9556 gCamCapability[cameraId]->supported_white_balances[i]);
9557 if (NAME_NOT_FOUND != val) {
9558 avail_awb_modes[size] = (uint8_t)val;
9559 size++;
9560 }
9561 }
9562 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9563 avail_awb_modes,
9564 size);
9565
9566 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9567 count = CAM_FLASH_FIRING_LEVEL_MAX;
9568 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9569 count);
9570 for (size_t i = 0; i < count; i++) {
9571 available_flash_levels[i] =
9572 gCamCapability[cameraId]->supported_firing_levels[i];
9573 }
9574 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9575 available_flash_levels, count);
9576
9577 uint8_t flashAvailable;
9578 if (gCamCapability[cameraId]->flash_available)
9579 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9580 else
9581 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9582 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9583 &flashAvailable, 1);
9584
9585 Vector<uint8_t> avail_ae_modes;
9586 count = CAM_AE_MODE_MAX;
9587 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9588 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009589 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9590 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9591 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9592 }
9593 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009594 }
9595 if (flashAvailable) {
9596 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9597 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009598 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE);
Thierry Strudel3d639192016-09-09 11:52:26 -07009599 }
9600 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9601 avail_ae_modes.array(),
9602 avail_ae_modes.size());
9603
9604 int32_t sensitivity_range[2];
9605 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9606 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9607 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9608 sensitivity_range,
9609 sizeof(sensitivity_range) / sizeof(int32_t));
9610
9611 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9612 &gCamCapability[cameraId]->max_analog_sensitivity,
9613 1);
9614
9615 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9616 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9617 &sensor_orientation,
9618 1);
9619
9620 int32_t max_output_streams[] = {
9621 MAX_STALLING_STREAMS,
9622 MAX_PROCESSED_STREAMS,
9623 MAX_RAW_STREAMS};
9624 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9625 max_output_streams,
9626 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9627
9628 uint8_t avail_leds = 0;
9629 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9630 &avail_leds, 0);
9631
9632 uint8_t focus_dist_calibrated;
9633 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9634 gCamCapability[cameraId]->focus_dist_calibrated);
9635 if (NAME_NOT_FOUND != val) {
9636 focus_dist_calibrated = (uint8_t)val;
9637 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9638 &focus_dist_calibrated, 1);
9639 }
9640
9641 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9642 size = 0;
9643 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9644 MAX_TEST_PATTERN_CNT);
9645 for (size_t i = 0; i < count; i++) {
9646 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9647 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9648 if (NAME_NOT_FOUND != testpatternMode) {
9649 avail_testpattern_modes[size] = testpatternMode;
9650 size++;
9651 }
9652 }
9653 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9654 avail_testpattern_modes,
9655 size);
9656
9657 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9658 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9659 &max_pipeline_depth,
9660 1);
9661
9662 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9663 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9664 &partial_result_count,
9665 1);
9666
9667 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9668 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9669
9670 Vector<uint8_t> available_capabilities;
9671 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9672 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9673 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9674 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9675 if (supportBurst) {
9676 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9677 }
9678 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9679 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9680 if (hfrEnable && available_hfr_configs.array()) {
9681 available_capabilities.add(
9682 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9683 }
9684
9685 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9686 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9687 }
9688 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9689 available_capabilities.array(),
9690 available_capabilities.size());
9691
9692 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
9693 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9694 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9695 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9696
9697 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9698 &aeLockAvailable, 1);
9699
9700 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
9701 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9702 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9703 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9704
9705 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9706 &awbLockAvailable, 1);
9707
9708 int32_t max_input_streams = 1;
9709 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9710 &max_input_streams,
9711 1);
9712
9713    /* Format of the map is: input format, num_output_formats, outputFormat1, ..., outputFormatN */
9714 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9715 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9716 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9717 HAL_PIXEL_FORMAT_YCbCr_420_888};
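    /* Decoded, the map above reads (restating the array, not new data):
     *   IMPLEMENTATION_DEFINED -> { BLOB, YCbCr_420_888 }
     *   YCbCr_420_888          -> { BLOB, YCbCr_420_888 }
     * i.e. each input format is followed by its output-format count, then the formats. */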
9718 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9719 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9720
9721 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9722 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9723 &max_latency,
9724 1);
9725
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009726#ifndef USE_HAL_3_3
9727 int32_t isp_sensitivity_range[2];
9728 isp_sensitivity_range[0] =
9729 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9730 isp_sensitivity_range[1] =
9731 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9732 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9733 isp_sensitivity_range,
9734 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9735#endif
9736
Thierry Strudel3d639192016-09-09 11:52:26 -07009737 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9738 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9739 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9740 available_hot_pixel_modes,
9741 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9742
9743 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9744 ANDROID_SHADING_MODE_FAST,
9745 ANDROID_SHADING_MODE_HIGH_QUALITY};
9746 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9747 available_shading_modes,
9748 3);
9749
9750 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9751 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9752 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9753 available_lens_shading_map_modes,
9754 2);
9755
9756 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9757 ANDROID_EDGE_MODE_FAST,
9758 ANDROID_EDGE_MODE_HIGH_QUALITY,
9759 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9760 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9761 available_edge_modes,
9762 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9763
9764 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9765 ANDROID_NOISE_REDUCTION_MODE_FAST,
9766 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9767 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9768 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9769 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9770 available_noise_red_modes,
9771 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9772
9773 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9774 ANDROID_TONEMAP_MODE_FAST,
9775 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9776 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9777 available_tonemap_modes,
9778 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9779
9780 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9781 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9782 available_hot_pixel_map_modes,
9783 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9784
9785 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9786 gCamCapability[cameraId]->reference_illuminant1);
9787 if (NAME_NOT_FOUND != val) {
9788 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9789 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9790 }
9791
9792 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9793 gCamCapability[cameraId]->reference_illuminant2);
9794 if (NAME_NOT_FOUND != val) {
9795 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9796 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9797 }
9798
9799 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9800 (void *)gCamCapability[cameraId]->forward_matrix1,
9801 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9802
9803 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9804 (void *)gCamCapability[cameraId]->forward_matrix2,
9805 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9806
9807 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9808 (void *)gCamCapability[cameraId]->color_transform1,
9809 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9810
9811 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9812 (void *)gCamCapability[cameraId]->color_transform2,
9813 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9814
9815 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9816 (void *)gCamCapability[cameraId]->calibration_transform1,
9817 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9818
9819 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9820 (void *)gCamCapability[cameraId]->calibration_transform2,
9821 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9822
9823 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9824 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9825 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9826 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9827 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9828 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9829 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9830 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9831 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9832 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9833 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9834 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9835 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9836 ANDROID_JPEG_GPS_COORDINATES,
9837 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9838 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9839 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9840 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9841 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9842 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9843 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9844 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9845 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9846 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009847#ifndef USE_HAL_3_3
9848 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9849#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009850 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009851 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009852 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9853 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009854 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009855 /* DevCamDebug metadata request_keys_basic */
9856 DEVCAMDEBUG_META_ENABLE,
9857 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009858 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9859 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS
Samuel Ha68ba5172016-12-15 18:41:12 -08009860 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009861
9862 size_t request_keys_cnt =
9863 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9864 Vector<int32_t> available_request_keys;
9865 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9866 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9867 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9868 }
9869
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009870 if (gExposeEnableZslKey) {
9871 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9872 }
9873
Thierry Strudel3d639192016-09-09 11:52:26 -07009874 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9875 available_request_keys.array(), available_request_keys.size());
9876
9877 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9878 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9879 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9880 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9881 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9882 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9883 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9884 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9885 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9886 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9887 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9888 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9889 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9890 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9891 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9892 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9893 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009894 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009895 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9896 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9897 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009898 ANDROID_STATISTICS_FACE_SCORES,
9899#ifndef USE_HAL_3_3
9900 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9901#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009902 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009903 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009904 // DevCamDebug metadata result_keys_basic
9905 DEVCAMDEBUG_META_ENABLE,
9906 // DevCamDebug metadata result_keys AF
9907 DEVCAMDEBUG_AF_LENS_POSITION,
9908 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9909 DEVCAMDEBUG_AF_TOF_DISTANCE,
9910 DEVCAMDEBUG_AF_LUMA,
9911 DEVCAMDEBUG_AF_HAF_STATE,
9912 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9913 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9914 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9915 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9916 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9917 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9918 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9919 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9920 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9921 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9922 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9923 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9924 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9925 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9926 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9927 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9928 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9929 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9930 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9931 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9932 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9933 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
9934 // DevCamDebug metadata result_keys AEC
9935 DEVCAMDEBUG_AEC_TARGET_LUMA,
9936 DEVCAMDEBUG_AEC_COMP_LUMA,
9937 DEVCAMDEBUG_AEC_AVG_LUMA,
9938 DEVCAMDEBUG_AEC_CUR_LUMA,
9939 DEVCAMDEBUG_AEC_LINECOUNT,
9940 DEVCAMDEBUG_AEC_REAL_GAIN,
9941 DEVCAMDEBUG_AEC_EXP_INDEX,
9942 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -08009943 // DevCamDebug metadata result_keys zzHDR
9944 DEVCAMDEBUG_AEC_L_REAL_GAIN,
9945 DEVCAMDEBUG_AEC_L_LINECOUNT,
9946 DEVCAMDEBUG_AEC_S_REAL_GAIN,
9947 DEVCAMDEBUG_AEC_S_LINECOUNT,
9948 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
9949 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
9950 // DevCamDebug metadata result_keys ADRC
9951 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
9952 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
9953 DEVCAMDEBUG_AEC_GTM_RATIO,
9954 DEVCAMDEBUG_AEC_LTM_RATIO,
9955 DEVCAMDEBUG_AEC_LA_RATIO,
9956 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -08009957 // DevCamDebug metadata result_keys AWB
9958 DEVCAMDEBUG_AWB_R_GAIN,
9959 DEVCAMDEBUG_AWB_G_GAIN,
9960 DEVCAMDEBUG_AWB_B_GAIN,
9961 DEVCAMDEBUG_AWB_CCT,
9962 DEVCAMDEBUG_AWB_DECISION,
9963 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009964 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
9965 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9966 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009967 };
9968
Thierry Strudel3d639192016-09-09 11:52:26 -07009969 size_t result_keys_cnt =
9970 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
9971
9972 Vector<int32_t> available_result_keys;
9973 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
9974 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9975 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
9976 }
9977 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
9978 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
9979 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
9980 }
9981 if (supportedFaceDetectMode == 1) {
9982 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
9983 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
9984 } else if ((supportedFaceDetectMode == 2) ||
9985 (supportedFaceDetectMode == 3)) {
9986 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
9987 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
9988 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009989#ifndef USE_HAL_3_3
9990 if (hasBlackRegions) {
9991 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
9992 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
9993 }
9994#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009995
9996 if (gExposeEnableZslKey) {
9997 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9998 }
9999
Thierry Strudel3d639192016-09-09 11:52:26 -070010000 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10001 available_result_keys.array(), available_result_keys.size());
10002
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010003 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010004 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10005 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10006 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10007 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10008 ANDROID_SCALER_CROPPING_TYPE,
10009 ANDROID_SYNC_MAX_LATENCY,
10010 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10011 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10012 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10013 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10014 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10015 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10016 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10017 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10018 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10019 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10020 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10021 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10022 ANDROID_LENS_FACING,
10023 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10024 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10025 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10026 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10027 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10028 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10029 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10030 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10031 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10032 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10033 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10034 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10035 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10036 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10037 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10038 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10039 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10040 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10041 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10042 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010043 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010044 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10045 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10046 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10047 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10048 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10049 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10050 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10051 ANDROID_CONTROL_AVAILABLE_MODES,
10052 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10053 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10054 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10055 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010056 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10057#ifndef USE_HAL_3_3
10058 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10059 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10060#endif
10061 };
10062
10063 Vector<int32_t> available_characteristics_keys;
10064 available_characteristics_keys.appendArray(characteristics_keys_basic,
10065 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10066#ifndef USE_HAL_3_3
10067 if (hasBlackRegions) {
10068 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10069 }
10070#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010071
10072 if (0 <= indexPD) {
10073 int32_t depthKeys[] = {
10074 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10075 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10076 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10077 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10078 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10079 };
10080 available_characteristics_keys.appendArray(depthKeys,
10081 sizeof(depthKeys) / sizeof(depthKeys[0]));
10082 }
10083
Thierry Strudel3d639192016-09-09 11:52:26 -070010084 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010085 available_characteristics_keys.array(),
10086 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010087
10088    /* Available stall durations depend on the HW + SW and will differ across devices */
10089    /* TODO: add entries for RAW once implemented */
10090 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10091 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10092
10093 Vector<int64_t> available_stall_durations;
10094 for (uint32_t j = 0; j < stall_formats_count; j++) {
10095 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10096 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10097 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10098 available_stall_durations.add(stall_formats[j]);
10099 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10100 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10101 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10102 }
10103 } else {
10104 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10105 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10106 available_stall_durations.add(stall_formats[j]);
10107 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10108 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10109 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10110 }
10111 }
10112 }
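    /* Each group of four entries added above is (format, width, height, stall duration),
     * with the duration taken from the corresponding capability table; per the
     * ANDROID_SCALER_AVAILABLE_STALL_DURATIONS convention the durations are in ns. */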
10113 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10114 available_stall_durations.array(),
10115 available_stall_durations.size());
10116
10117 //QCAMERA3_OPAQUE_RAW
10118 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10119 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10120 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10121 case LEGACY_RAW:
10122 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10123 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10124 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10125 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10126 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10127 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10128 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10129 break;
10130 case MIPI_RAW:
10131 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10132 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10133 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10134 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10135 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10136 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10137 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10138 break;
10139 default:
10140 LOGE("unknown opaque_raw_format %d",
10141 gCamCapability[cameraId]->opaque_raw_fmt);
10142 break;
10143 }
10144 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10145
10146 Vector<int32_t> strides;
10147 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10148 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10149 cam_stream_buf_plane_info_t buf_planes;
10150 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10151 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10152 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10153 &gCamCapability[cameraId]->padding_info, &buf_planes);
10154 strides.add(buf_planes.plane_info.mp[0].stride);
10155 }
10156 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10157 strides.size());
10158
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010159 //TBD: remove the following line once backend advertises zzHDR in feature mask
10160 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010161 //Video HDR default
10162 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10163 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010164 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010165 int32_t vhdr_mode[] = {
10166 QCAMERA3_VIDEO_HDR_MODE_OFF,
10167 QCAMERA3_VIDEO_HDR_MODE_ON};
10168
10169 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10170 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10171 vhdr_mode, vhdr_mode_count);
10172 }
10173
Thierry Strudel3d639192016-09-09 11:52:26 -070010174 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10175 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10176 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10177
10178 uint8_t isMonoOnly =
10179 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10180 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10181 &isMonoOnly, 1);
10182
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010183#ifndef USE_HAL_3_3
10184 Vector<int32_t> opaque_size;
10185 for (size_t j = 0; j < scalar_formats_count; j++) {
10186 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10187 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10188 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10189 cam_stream_buf_plane_info_t buf_planes;
10190
10191 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10192 &gCamCapability[cameraId]->padding_info, &buf_planes);
10193
10194 if (rc == 0) {
10195 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10196 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10197 opaque_size.add(buf_planes.plane_info.frame_len);
10198                } else {
10199 LOGE("raw frame calculation failed!");
10200 }
10201 }
10202 }
10203 }
10204
10205 if ((opaque_size.size() > 0) &&
10206 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10207 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10208 else
10209        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10210#endif
10211
Thierry Strudel04e026f2016-10-10 11:27:36 -070010212 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10213 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10214 size = 0;
10215 count = CAM_IR_MODE_MAX;
10216 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10217 for (size_t i = 0; i < count; i++) {
10218 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10219 gCamCapability[cameraId]->supported_ir_modes[i]);
10220 if (NAME_NOT_FOUND != val) {
10221 avail_ir_modes[size] = (int32_t)val;
10222 size++;
10223 }
10224 }
10225 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10226 avail_ir_modes, size);
10227 }
10228
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010229 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10230 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10231 size = 0;
10232 count = CAM_AEC_CONVERGENCE_MAX;
10233 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10234 for (size_t i = 0; i < count; i++) {
10235 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10236 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10237 if (NAME_NOT_FOUND != val) {
10238 available_instant_aec_modes[size] = (int32_t)val;
10239 size++;
10240 }
10241 }
10242 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10243 available_instant_aec_modes, size);
10244 }
10245
Thierry Strudel54dc9782017-02-15 12:12:10 -080010246 int32_t sharpness_range[] = {
10247 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10248 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10249 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10250
10251 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10252 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10253 size = 0;
10254 count = CAM_BINNING_CORRECTION_MODE_MAX;
10255 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10256 for (size_t i = 0; i < count; i++) {
10257 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10258 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10259 gCamCapability[cameraId]->supported_binning_modes[i]);
10260 if (NAME_NOT_FOUND != val) {
10261 avail_binning_modes[size] = (int32_t)val;
10262 size++;
10263 }
10264 }
10265 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10266 avail_binning_modes, size);
10267 }
10268
10269 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10270 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10271 size = 0;
10272 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10273 for (size_t i = 0; i < count; i++) {
10274 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10275 gCamCapability[cameraId]->supported_aec_modes[i]);
10276 if (NAME_NOT_FOUND != val)
10277 available_aec_modes[size++] = val;
10278 }
10279 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10280 available_aec_modes, size);
10281 }
10282
10283 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10284 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10285 size = 0;
10286 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10287 for (size_t i = 0; i < count; i++) {
10288 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10289 gCamCapability[cameraId]->supported_iso_modes[i]);
10290 if (NAME_NOT_FOUND != val)
10291 available_iso_modes[size++] = val;
10292 }
10293 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10294 available_iso_modes, size);
10295 }
10296
10297 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10298    for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10299 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10300 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10301 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10302
10303 int32_t available_saturation_range[4];
10304 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10305 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10306 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10307 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10308 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10309 available_saturation_range, 4);
10310
10311 uint8_t is_hdr_values[2];
10312 is_hdr_values[0] = 0;
10313 is_hdr_values[1] = 1;
10314 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10315 is_hdr_values, 2);
10316
10317 float is_hdr_confidence_range[2];
10318 is_hdr_confidence_range[0] = 0.0;
10319 is_hdr_confidence_range[1] = 1.0;
10320 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10321 is_hdr_confidence_range, 2);
10322
Emilian Peev0a972ef2017-03-16 10:25:53 +000010323 size_t eepromLength = strnlen(
10324 reinterpret_cast<const char *>(
10325 gCamCapability[cameraId]->eeprom_version_info),
10326 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10327 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010328 char easelInfo[] = ",E:N";
10329 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10330 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10331 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010332 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10333 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010334 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010335 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10336 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10337 }
10338
Thierry Strudel3d639192016-09-09 11:52:26 -070010339 gStaticMetadata[cameraId] = staticInfo.release();
10340 return rc;
10341}
10342
10343/*===========================================================================
10344 * FUNCTION : makeTable
10345 *
10346 * DESCRIPTION: make a table of sizes
10347 *
10348 * PARAMETERS :
10349 *
10350 *
10351 *==========================================================================*/
10352void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10353 size_t max_size, int32_t *sizeTable)
10354{
10355 size_t j = 0;
10356 if (size > max_size) {
10357 size = max_size;
10358 }
10359 for (size_t i = 0; i < size; i++) {
10360 sizeTable[j] = dimTable[i].width;
10361 sizeTable[j+1] = dimTable[i].height;
10362 j+=2;
10363 }
10364}
10365
10366/*===========================================================================
10367 * FUNCTION : makeFPSTable
10368 *
10369 * DESCRIPTION: make a table of fps ranges
10370 *
10371 * PARAMETERS :
10372 *
10373 *==========================================================================*/
10374void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10375 size_t max_size, int32_t *fpsRangesTable)
10376{
10377 size_t j = 0;
10378 if (size > max_size) {
10379 size = max_size;
10380 }
10381 for (size_t i = 0; i < size; i++) {
10382 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10383 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10384 j+=2;
10385 }
10386}
10387
10388/*===========================================================================
10389 * FUNCTION : makeOverridesList
10390 *
10391 * DESCRIPTION: make a list of scene mode overrides
10392 *
10393 * PARAMETERS :
10394 *
10395 *
10396 *==========================================================================*/
10397void QCamera3HardwareInterface::makeOverridesList(
10398 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10399 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10400{
10401    /* The daemon gives a list of overrides for all scene modes.
10402       However, we should send the framework only the overrides for the
10403       scene modes it supports. */
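    /* Layout sketch of the generated list: three bytes per advertised scene mode,
     * { aeMode, awbMode, afMode }, in the same order as the advertised scene modes.
     * A hypothetical entry might look like { ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE }. */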
10404 size_t j = 0;
10405 if (size > max_size) {
10406 size = max_size;
10407 }
10408 size_t focus_count = CAM_FOCUS_MODE_MAX;
10409 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10410 focus_count);
10411 for (size_t i = 0; i < size; i++) {
10412 bool supt = false;
10413 size_t index = supported_indexes[i];
10414 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10415 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10416 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10417 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10418 overridesTable[index].awb_mode);
10419 if (NAME_NOT_FOUND != val) {
10420 overridesList[j+1] = (uint8_t)val;
10421 }
10422 uint8_t focus_override = overridesTable[index].af_mode;
10423 for (size_t k = 0; k < focus_count; k++) {
10424 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10425 supt = true;
10426 break;
10427 }
10428 }
10429 if (supt) {
10430 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10431 focus_override);
10432 if (NAME_NOT_FOUND != val) {
10433 overridesList[j+2] = (uint8_t)val;
10434 }
10435 } else {
10436 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10437 }
10438 j+=3;
10439 }
10440}
10441
10442/*===========================================================================
10443 * FUNCTION : filterJpegSizes
10444 *
10445 * DESCRIPTION: Returns the supported JPEG sizes, limited to those whose dimensions
10446 *              are at least the active array size divided by the downscale factor
10447 *
10448 * PARAMETERS :
10449 *
10450 * RETURN : length of jpegSizes array
10451 *==========================================================================*/
10452
10453size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10454 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10455 uint8_t downscale_factor)
10456{
10457 if (0 == downscale_factor) {
10458 downscale_factor = 1;
10459 }
10460
10461 int32_t min_width = active_array_size.width / downscale_factor;
10462 int32_t min_height = active_array_size.height / downscale_factor;
10463 size_t jpegSizesCnt = 0;
10464 if (processedSizesCnt > maxCount) {
10465 processedSizesCnt = maxCount;
10466 }
10467 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10468 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10469 jpegSizes[jpegSizesCnt] = processedSizes[i];
10470 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10471 jpegSizesCnt += 2;
10472 }
10473 }
10474 return jpegSizesCnt;
10475}
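/* Worked example with hypothetical numbers: for an active array of 4000x3000 and
 * downscale_factor = 2, min_width/min_height become 2000/1500, so only processed
 * sizes with both dimensions at or above 2000x1500 are copied into jpegSizes. */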
10476
10477/*===========================================================================
10478 * FUNCTION : computeNoiseModelEntryS
10479 *
10480 * DESCRIPTION: function to map a given sensitivity to the S noise
10481 * model parameters in the DNG noise model.
10482 *
10483 * PARAMETERS : sens : the sensor sensitivity
10484 *
10485 ** RETURN : S (sensor amplification) noise
10486 *
10487 *==========================================================================*/
10488double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10489 double s = gCamCapability[mCameraId]->gradient_S * sens +
10490 gCamCapability[mCameraId]->offset_S;
10491 return ((s < 0.0) ? 0.0 : s);
10492}
10493
10494/*===========================================================================
10495 * FUNCTION : computeNoiseModelEntryO
10496 *
10497 * DESCRIPTION: function to map a given sensitivity to the O noise
10498 * model parameters in the DNG noise model.
10499 *
10500 * PARAMETERS : sens : the sensor sensitivity
10501 *
10502 ** RETURN : O (sensor readout) noise
10503 *
10504 *==========================================================================*/
10505double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10506 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10507 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10508 1.0 : (1.0 * sens / max_analog_sens);
10509 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10510 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10511 return ((o < 0.0) ? 0.0 : o);
10512}
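/* Worked sketch of the two noise-model entries above (coefficients hypothetical):
 * with gradient_S = 3e-06, offset_S = 2e-06 and sens = 400,
 *   S = 3e-06 * 400 + 2e-06 = 1.202e-03 (clamped below at 0).
 * O additionally scales its offset term by the squared digital gain once sens
 * exceeds max_analog_sensitivity. */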
10513
10514/*===========================================================================
10515 * FUNCTION : getSensorSensitivity
10516 *
10517 * DESCRIPTION: convert iso_mode to an integer value
10518 *
10519 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10520 *
10521 ** RETURN : sensitivity supported by sensor
10522 *
10523 *==========================================================================*/
10524int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10525{
10526 int32_t sensitivity;
10527
10528 switch (iso_mode) {
10529 case CAM_ISO_MODE_100:
10530 sensitivity = 100;
10531 break;
10532 case CAM_ISO_MODE_200:
10533 sensitivity = 200;
10534 break;
10535 case CAM_ISO_MODE_400:
10536 sensitivity = 400;
10537 break;
10538 case CAM_ISO_MODE_800:
10539 sensitivity = 800;
10540 break;
10541 case CAM_ISO_MODE_1600:
10542 sensitivity = 1600;
10543 break;
10544 default:
10545 sensitivity = -1;
10546 break;
10547 }
10548 return sensitivity;
10549}
10550
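/*===========================================================================
 * FUNCTION   : initHdrPlusClientLocked
 *
 * DESCRIPTION: If Easel is present on the device (and powering it on is not
 *              disabled via camera.hdrplus.donotpoweroneasel), open the Easel
 *              manager client, suspend Easel immediately, and cache the HDR+
 *              related properties. Must be called with gHdrPlusClientLock held.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success or when Easel is absent/not powered on
 *              non-zero error code otherwise
 *==========================================================================*/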
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010551int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010552 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010553 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10554 // to connect to Easel.
10555 bool doNotPowerOnEasel =
10556 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10557
10558 if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010559 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10560 return OK;
10561 }
10562
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010563 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010564 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010565 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010566 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010567 return res;
10568 }
10569
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010570 EaselManagerClientOpened = true;
10571
10572 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010573 if (res != OK) {
10574 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10575 }
10576
10577 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010578 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010579
10580 // Expose enableZsl key only when HDR+ mode is enabled.
10581 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010582 }
10583
10584 return OK;
10585}
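/* Editor's note: the behavior above is driven entirely by the system
 * properties already referenced in the code, so for local experimentation
 * they can be toggled from a shell before the camera HAL starts, e.g.:
 *
 *   adb shell setprop persist.camera.hdrplus.enable 1
 *   adb shell setprop persist.camera.hdrplus.profiling 1
 *   adb shell setprop camera.hdrplus.donotpoweroneasel 1
 *
 * Defaults and exact semantics may differ between builds; treat this as a
 * sketch rather than a supported interface.
 */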
10586
Thierry Strudel3d639192016-09-09 11:52:26 -070010587/*===========================================================================
10588 * FUNCTION : getCamInfo
10589 *
10590 * DESCRIPTION: query camera capabilities
10591 *
10592 * PARAMETERS :
10593 * @cameraId : camera Id
10594 * @info : camera info struct to be filled in with camera capabilities
10595 *
10596 * RETURN : int type of status
10597 * NO_ERROR -- success
10598 * none-zero failure code
10599 *==========================================================================*/
10600int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10601 struct camera_info *info)
10602{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010603 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010604 int rc = 0;
10605
10606 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010607
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010608 {
10609 Mutex::Autolock l(gHdrPlusClientLock);
10610 rc = initHdrPlusClientLocked();
10611 if (rc != OK) {
10612 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10613 pthread_mutex_unlock(&gCamLock);
10614 return rc;
10615 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010616 }
10617
Thierry Strudel3d639192016-09-09 11:52:26 -070010618 if (NULL == gCamCapability[cameraId]) {
10619 rc = initCapabilities(cameraId);
10620 if (rc < 0) {
10621 pthread_mutex_unlock(&gCamLock);
10622 return rc;
10623 }
10624 }
10625
10626 if (NULL == gStaticMetadata[cameraId]) {
10627 rc = initStaticMetadata(cameraId);
10628 if (rc < 0) {
10629 pthread_mutex_unlock(&gCamLock);
10630 return rc;
10631 }
10632 }
10633
10634 switch(gCamCapability[cameraId]->position) {
10635 case CAM_POSITION_BACK:
10636 case CAM_POSITION_BACK_AUX:
10637 info->facing = CAMERA_FACING_BACK;
10638 break;
10639
10640 case CAM_POSITION_FRONT:
10641 case CAM_POSITION_FRONT_AUX:
10642 info->facing = CAMERA_FACING_FRONT;
10643 break;
10644
10645 default:
10646 LOGE("Unknown position type %d for camera id:%d",
10647 gCamCapability[cameraId]->position, cameraId);
10648 rc = -1;
10649 break;
10650 }
10651
10652
10653 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010654#ifndef USE_HAL_3_3
10655 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10656#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010657 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010658#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010659 info->static_camera_characteristics = gStaticMetadata[cameraId];
10660
10661 //For now assume both cameras can operate independently.
10662 info->conflicting_devices = NULL;
10663 info->conflicting_devices_length = 0;
10664
10665 //resource cost is 100 * MIN(1.0, m/M),
10666 //where m is throughput requirement with maximum stream configuration
10667 //and M is CPP maximum throughput.
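    //Illustrative example with assumed (not measured) numbers: for a
    //4000x3000 active array, max_fps = 30, MAX_PROCESSED_STREAMS = 3 and
    //max_pixel_bandwidth = 1.2e9, ratio = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9,
    //so resource_cost comes out as 90.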
10668 float max_fps = 0.0;
10669 for (uint32_t i = 0;
10670 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10671 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10672 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10673 }
10674 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10675 gCamCapability[cameraId]->active_array_size.width *
10676 gCamCapability[cameraId]->active_array_size.height * max_fps /
10677 gCamCapability[cameraId]->max_pixel_bandwidth;
10678 info->resource_cost = 100 * MIN(1.0, ratio);
10679 LOGI("camera %d resource cost is %d", cameraId,
10680 info->resource_cost);
10681
10682 pthread_mutex_unlock(&gCamLock);
10683 return rc;
10684}
10685
10686/*===========================================================================
10687 * FUNCTION : translateCapabilityToMetadata
10688 *
10689 * DESCRIPTION: translate the capability into camera_metadata_t
10690 *
10691 * PARAMETERS : type of the request
10692 *
10693 *
10694 * RETURN : success: camera_metadata_t*
10695 * failure: NULL
10696 *
10697 *==========================================================================*/
10698camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10699{
10700 if (mDefaultMetadata[type] != NULL) {
10701 return mDefaultMetadata[type];
10702 }
10703 //first time we are handling this request
10704 //fill up the metadata structure using the wrapper class
10705 CameraMetadata settings;
10706 //translate from cam_capability_t to camera_metadata_tag_t
10707 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10708 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10709 int32_t defaultRequestID = 0;
10710 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10711
10712 /* OIS disable */
10713 char ois_prop[PROPERTY_VALUE_MAX];
10714 memset(ois_prop, 0, sizeof(ois_prop));
10715 property_get("persist.camera.ois.disable", ois_prop, "0");
10716 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10717
10718 /* Force video to use OIS */
10719 char videoOisProp[PROPERTY_VALUE_MAX];
10720 memset(videoOisProp, 0, sizeof(videoOisProp));
10721 property_get("persist.camera.ois.video", videoOisProp, "1");
10722 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010723
10724 // Hybrid AE enable/disable
10725 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10726 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10727 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10728 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10729
Thierry Strudel3d639192016-09-09 11:52:26 -070010730 uint8_t controlIntent = 0;
10731 uint8_t focusMode;
10732 uint8_t vsMode;
10733 uint8_t optStabMode;
10734 uint8_t cacMode;
10735 uint8_t edge_mode;
10736 uint8_t noise_red_mode;
10737 uint8_t tonemap_mode;
10738 bool highQualityModeEntryAvailable = FALSE;
10739 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010740 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010741 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10742 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010743 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010744 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010745 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010746
Thierry Strudel3d639192016-09-09 11:52:26 -070010747 switch (type) {
10748 case CAMERA3_TEMPLATE_PREVIEW:
10749 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10750 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10751 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10752 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10753 edge_mode = ANDROID_EDGE_MODE_FAST;
10754 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10755 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10756 break;
10757 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10758 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10759 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10760 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10761 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10762 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10763 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10764 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10765 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10766 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10767 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10768 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10769 highQualityModeEntryAvailable = TRUE;
10770 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10771 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10772 fastModeEntryAvailable = TRUE;
10773 }
10774 }
10775 if (highQualityModeEntryAvailable) {
10776 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10777 } else if (fastModeEntryAvailable) {
10778 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10779 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010780 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10781 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10782 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010783 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010784 break;
10785 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10786 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10787 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10788 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010789 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10790 edge_mode = ANDROID_EDGE_MODE_FAST;
10791 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10792 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10793 if (forceVideoOis)
10794 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10795 break;
10796 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10797 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10798 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10799 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010800 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10801 edge_mode = ANDROID_EDGE_MODE_FAST;
10802 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10803 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10804 if (forceVideoOis)
10805 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10806 break;
10807 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10808 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10809 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10810 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10811 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10812 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10813 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10814 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10815 break;
10816 case CAMERA3_TEMPLATE_MANUAL:
10817 edge_mode = ANDROID_EDGE_MODE_FAST;
10818 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10819 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10820 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10821 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10822 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10823 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10824 break;
10825 default:
10826 edge_mode = ANDROID_EDGE_MODE_FAST;
10827 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10828 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10829 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10830 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10831 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10832 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10833 break;
10834 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010835 // Set CAC to OFF if the underlying device doesn't support it
10836 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10837 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10838 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010839 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10840 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10841 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10842 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10843 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10844 }
10845 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010846 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010847 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010848
10849 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10850 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10851 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10852 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10853 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10854 || ois_disable)
10855 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10856 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010857 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010858
10859 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10860 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10861
10862 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10863 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10864
10865 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10866 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10867
10868 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10869 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10870
10871 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10872 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10873
10874 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10875 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10876
10877 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10878 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10879
10880 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10881 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10882
10883 /*flash*/
10884 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10885 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10886
10887 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10888 settings.update(ANDROID_FLASH_FIRING_POWER,
10889 &flashFiringLevel, 1);
10890
10891 /* lens */
10892 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10893 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10894
10895 if (gCamCapability[mCameraId]->filter_densities_count) {
10896 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10897 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10898 gCamCapability[mCameraId]->filter_densities_count);
10899 }
10900
10901 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10902 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10903
Thierry Strudel3d639192016-09-09 11:52:26 -070010904 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10905 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10906
10907 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10908 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10909
10910 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10911 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10912
10913 /* face detection (default to OFF) */
10914 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10915 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10916
Thierry Strudel54dc9782017-02-15 12:12:10 -080010917 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10918 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010919
10920 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10921 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10922
10923 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10924 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10925
Thierry Strudel3d639192016-09-09 11:52:26 -070010926
10927 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10928 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10929
10930 /* Exposure time (use the minimum supported exposure time) */
10931 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10932 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10933
10934 /* frame duration */
10935 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
10936 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
10937
10938 /* sensitivity */
10939 static const int32_t default_sensitivity = 100;
10940 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010941#ifndef USE_HAL_3_3
10942 static const int32_t default_isp_sensitivity =
10943 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
10944 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
10945#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010946
10947 /*edge mode*/
10948 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
10949
10950 /*noise reduction mode*/
10951 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
10952
10953 /*color correction mode*/
10954 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
10955 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
10956
10957 /*transform matrix mode*/
10958 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
10959
10960 int32_t scaler_crop_region[4];
10961 scaler_crop_region[0] = 0;
10962 scaler_crop_region[1] = 0;
10963 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
10964 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10965 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10966
10967 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10968 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10969
10970 /*focus distance*/
10971 float focus_distance = 0.0;
10972 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10973
10974 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010975 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010976 float max_range = 0.0;
10977 float max_fixed_fps = 0.0;
10978 int32_t fps_range[2] = {0, 0};
10979 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10980 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010981 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10982 TEMPLATE_MAX_PREVIEW_FPS) {
10983 continue;
10984 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010985 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10986 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10987 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10988 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10989 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10990 if (range > max_range) {
10991 fps_range[0] =
10992 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10993 fps_range[1] =
10994 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10995 max_range = range;
10996 }
10997 } else {
10998 if (range < 0.01 && max_fixed_fps <
10999 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11000 fps_range[0] =
11001 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11002 fps_range[1] =
11003 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11004 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11005 }
11006 }
11007 }
11008 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11009
11010 /*precapture trigger*/
11011 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11012 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11013
11014 /*af trigger*/
11015 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11016 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11017
11018 /* ae & af regions */
11019 int32_t active_region[] = {
11020 gCamCapability[mCameraId]->active_array_size.left,
11021 gCamCapability[mCameraId]->active_array_size.top,
11022 gCamCapability[mCameraId]->active_array_size.left +
11023 gCamCapability[mCameraId]->active_array_size.width,
11024 gCamCapability[mCameraId]->active_array_size.top +
11025 gCamCapability[mCameraId]->active_array_size.height,
11026 0};
11027 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11028 sizeof(active_region) / sizeof(active_region[0]));
11029 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11030 sizeof(active_region) / sizeof(active_region[0]));
11031
11032 /* black level lock */
11033 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11034 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11035
Thierry Strudel3d639192016-09-09 11:52:26 -070011036 //special defaults for manual template
11037 if (type == CAMERA3_TEMPLATE_MANUAL) {
11038 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11039 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11040
11041 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11042 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11043
11044 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11045 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11046
11047 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11048 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11049
11050 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11051 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11052
11053 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11054 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11055 }
11056
11057
11058 /* TNR
11059 * We'll use this location to determine for which templates TNR will be set.
11060 * We will enable TNR if either the Preview or the Video stream requires TNR.
11061 * This is not to be confused with linking on a per-stream basis; that decision
11062 * is still made per session and is handled as part of stream configuration.
11063 */
11064 uint8_t tnr_enable = 0;
11065
11066 if (m_bTnrPreview || m_bTnrVideo) {
11067
11068 switch (type) {
11069 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11070 tnr_enable = 1;
11071 break;
11072
11073 default:
11074 tnr_enable = 0;
11075 break;
11076 }
11077
11078 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11079 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11080 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11081
11082 LOGD("TNR:%d with process plate %d for template:%d",
11083 tnr_enable, tnr_process_type, type);
11084 }
11085
11086 //Update Link tags to default
11087 int32_t sync_type = CAM_TYPE_STANDALONE;
11088 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11089
11090 int32_t is_main = 0; //this doesn't matter as app should overwrite
11091 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11092
11093 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11094
11095 /* CDS default */
11096 char prop[PROPERTY_VALUE_MAX];
11097 memset(prop, 0, sizeof(prop));
11098 property_get("persist.camera.CDS", prop, "Auto");
11099 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11100 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11101 if (CAM_CDS_MODE_MAX == cds_mode) {
11102 cds_mode = CAM_CDS_MODE_AUTO;
11103 }
11104
11105 /* Disable CDS in templates which have TNR enabled */
11106 if (tnr_enable)
11107 cds_mode = CAM_CDS_MODE_OFF;
11108
11109 int32_t mode = cds_mode;
11110 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011111
Thierry Strudel269c81a2016-10-12 12:13:59 -070011112 /* Manual Convergence AEC Speed is disabled by default*/
11113 float default_aec_speed = 0;
11114 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11115
11116 /* Manual Convergence AWB Speed is disabled by default*/
11117 float default_awb_speed = 0;
11118 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11119
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011120 // Set instant AEC to normal convergence by default
11121 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11122 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11123
Shuzhen Wang19463d72016-03-08 11:09:52 -080011124 /* hybrid ae */
11125 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11126
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011127 if (gExposeEnableZslKey) {
11128 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11129 }
11130
Thierry Strudel3d639192016-09-09 11:52:26 -070011131 mDefaultMetadata[type] = settings.release();
11132
11133 return mDefaultMetadata[type];
11134}
11135
11136/*===========================================================================
11137 * FUNCTION : setFrameParameters
11138 *
11139 * DESCRIPTION: set parameters per frame as requested in the metadata from
11140 * framework
11141 *
11142 * PARAMETERS :
11143 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011144 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011145 * @blob_request: Whether this request is a blob request or not
11146 *
11147 * RETURN : success: NO_ERROR
11148 * failure: non-zero error code
11149 *==========================================================================*/
11150int QCamera3HardwareInterface::setFrameParameters(
11151 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011152 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011153 int blob_request,
11154 uint32_t snapshotStreamId)
11155{
11156 /*translate from camera_metadata_t type to parm_type_t*/
11157 int rc = 0;
11158 int32_t hal_version = CAM_HAL_V3;
11159
11160 clear_metadata_buffer(mParameters);
11161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11162 LOGE("Failed to set hal version in the parameters");
11163 return BAD_VALUE;
11164 }
11165
11166 /*we need to update the frame number in the parameters*/
11167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11168 request->frame_number)) {
11169 LOGE("Failed to set the frame number in the parameters");
11170 return BAD_VALUE;
11171 }
11172
11173 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011175 LOGE("Failed to set stream type mask in the parameters");
11176 return BAD_VALUE;
11177 }
11178
11179 if (mUpdateDebugLevel) {
11180 uint32_t dummyDebugLevel = 0;
11181 /* The value of dummyDebugLevel is irrelevant. On
11182 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11184 dummyDebugLevel)) {
11185 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11186 return BAD_VALUE;
11187 }
11188 mUpdateDebugLevel = false;
11189 }
11190
11191 if(request->settings != NULL){
11192 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11193 if (blob_request)
11194 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11195 }
11196
11197 return rc;
11198}
11199
11200/*===========================================================================
11201 * FUNCTION : setReprocParameters
11202 *
11203 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11204 * return it.
11205 *
11206 * PARAMETERS :
11207 * @request : request that needs to be serviced
11208 *
11209 * RETURN : success: NO_ERROR
11210 * failure: non-zero error code
11211 *==========================================================================*/
11212int32_t QCamera3HardwareInterface::setReprocParameters(
11213 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11214 uint32_t snapshotStreamId)
11215{
11216 /*translate from camera_metadata_t type to parm_type_t*/
11217 int rc = 0;
11218
11219 if (NULL == request->settings){
11220 LOGE("Reprocess settings cannot be NULL");
11221 return BAD_VALUE;
11222 }
11223
11224 if (NULL == reprocParam) {
11225 LOGE("Invalid reprocessing metadata buffer");
11226 return BAD_VALUE;
11227 }
11228 clear_metadata_buffer(reprocParam);
11229
11230 /*we need to update the frame number in the parameters*/
11231 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11232 request->frame_number)) {
11233 LOGE("Failed to set the frame number in the parameters");
11234 return BAD_VALUE;
11235 }
11236
11237 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11238 if (rc < 0) {
11239 LOGE("Failed to translate reproc request");
11240 return rc;
11241 }
11242
11243 CameraMetadata frame_settings;
11244 frame_settings = request->settings;
11245 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11246 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11247 int32_t *crop_count =
11248 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11249 int32_t *crop_data =
11250 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11251 int32_t *roi_map =
11252 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11253 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11254 cam_crop_data_t crop_meta;
11255 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11256 crop_meta.num_of_streams = 1;
11257 crop_meta.crop_info[0].crop.left = crop_data[0];
11258 crop_meta.crop_info[0].crop.top = crop_data[1];
11259 crop_meta.crop_info[0].crop.width = crop_data[2];
11260 crop_meta.crop_info[0].crop.height = crop_data[3];
11261
11262 crop_meta.crop_info[0].roi_map.left =
11263 roi_map[0];
11264 crop_meta.crop_info[0].roi_map.top =
11265 roi_map[1];
11266 crop_meta.crop_info[0].roi_map.width =
11267 roi_map[2];
11268 crop_meta.crop_info[0].roi_map.height =
11269 roi_map[3];
11270
11271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11272 rc = BAD_VALUE;
11273 }
11274 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11275 request->input_buffer->stream,
11276 crop_meta.crop_info[0].crop.left,
11277 crop_meta.crop_info[0].crop.top,
11278 crop_meta.crop_info[0].crop.width,
11279 crop_meta.crop_info[0].crop.height);
11280 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11281 request->input_buffer->stream,
11282 crop_meta.crop_info[0].roi_map.left,
11283 crop_meta.crop_info[0].roi_map.top,
11284 crop_meta.crop_info[0].roi_map.width,
11285 crop_meta.crop_info[0].roi_map.height);
11286 } else {
11287 LOGE("Invalid reprocess crop count %d!", *crop_count);
11288 }
11289 } else {
11290 LOGE("No crop data from matching output stream");
11291 }
11292
11293 /* These settings are not needed for regular requests so handle them specially for
11294 reprocess requests; information needed for EXIF tags */
11295 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11296 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11297 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11298 if (NAME_NOT_FOUND != val) {
11299 uint32_t flashMode = (uint32_t)val;
11300 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11301 rc = BAD_VALUE;
11302 }
11303 } else {
11304 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11305 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11306 }
11307 } else {
11308 LOGH("No flash mode in reprocess settings");
11309 }
11310
11311 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11312 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11313 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11314 rc = BAD_VALUE;
11315 }
11316 } else {
11317 LOGH("No flash state in reprocess settings");
11318 }
11319
11320 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11321 uint8_t *reprocessFlags =
11322 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11324 *reprocessFlags)) {
11325 rc = BAD_VALUE;
11326 }
11327 }
11328
Thierry Strudel54dc9782017-02-15 12:12:10 -080011329 // Add exif debug data to internal metadata
11330 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11331 mm_jpeg_debug_exif_params_t *debug_params =
11332 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11333 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11334 // AE
11335 if (debug_params->ae_debug_params_valid == TRUE) {
11336 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11337 debug_params->ae_debug_params);
11338 }
11339 // AWB
11340 if (debug_params->awb_debug_params_valid == TRUE) {
11341 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11342 debug_params->awb_debug_params);
11343 }
11344 // AF
11345 if (debug_params->af_debug_params_valid == TRUE) {
11346 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11347 debug_params->af_debug_params);
11348 }
11349 // ASD
11350 if (debug_params->asd_debug_params_valid == TRUE) {
11351 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11352 debug_params->asd_debug_params);
11353 }
11354 // Stats
11355 if (debug_params->stats_debug_params_valid == TRUE) {
11356 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11357 debug_params->stats_debug_params);
11358 }
11359 // BE Stats
11360 if (debug_params->bestats_debug_params_valid == TRUE) {
11361 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11362 debug_params->bestats_debug_params);
11363 }
11364 // BHIST
11365 if (debug_params->bhist_debug_params_valid == TRUE) {
11366 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11367 debug_params->bhist_debug_params);
11368 }
11369 // 3A Tuning
11370 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11371 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11372 debug_params->q3a_tuning_debug_params);
11373 }
11374 }
11375
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011376 // Add metadata which reprocess needs
11377 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11378 cam_reprocess_info_t *repro_info =
11379 (cam_reprocess_info_t *)frame_settings.find
11380 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011381 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011382 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011383 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011384 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011385 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011386 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011387 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011388 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011389 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011390 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011391 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011392 repro_info->pipeline_flip);
11393 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11394 repro_info->af_roi);
11395 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11396 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011397 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
11398 CAM_INTF_PARM_ROTATION metadata has already been added in
11399 translateToHalMetadata. HAL needs to keep this new rotation
11400 metadata. Otherwise, the old rotation info saved in the vendor tag
11401 would be used */
11402 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11403 CAM_INTF_PARM_ROTATION, reprocParam) {
11404 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11405 } else {
11406 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011407 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011408 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011409 }
11410
11411 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11412 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11413 roi.width and roi.height would be the final JPEG size.
11414 For now, HAL only checks this for reprocess requests */
11415 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11416 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11417 uint8_t *enable =
11418 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11419 if (*enable == TRUE) {
11420 int32_t *crop_data =
11421 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11422 cam_stream_crop_info_t crop_meta;
11423 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11424 crop_meta.stream_id = 0;
11425 crop_meta.crop.left = crop_data[0];
11426 crop_meta.crop.top = crop_data[1];
11427 crop_meta.crop.width = crop_data[2];
11428 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011429 // The JPEG crop roi should match cpp output size
11430 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11431 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11432 crop_meta.roi_map.left = 0;
11433 crop_meta.roi_map.top = 0;
11434 crop_meta.roi_map.width = cpp_crop->crop.width;
11435 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011436 }
11437 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11438 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011439 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011440 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011441 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11442 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011443 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011444 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11445
11446 // Add JPEG scale information
11447 cam_dimension_t scale_dim;
11448 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11449 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11450 int32_t *roi =
11451 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11452 scale_dim.width = roi[2];
11453 scale_dim.height = roi[3];
11454 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11455 scale_dim);
11456 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11457 scale_dim.width, scale_dim.height, mCameraId);
11458 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011459 }
11460 }
11461
11462 return rc;
11463}
11464
11465/*===========================================================================
11466 * FUNCTION : saveRequestSettings
11467 *
11468 * DESCRIPTION: Add any settings that might have changed to the request settings
11469 * and save the settings to be applied on the frame
11470 *
11471 * PARAMETERS :
11472 * @jpegMetadata : the extracted and/or modified jpeg metadata
11473 * @request : request with initial settings
11474 *
11475 * RETURN :
11476 * camera_metadata_t* : pointer to the saved request settings
11477 *==========================================================================*/
11478camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11479 const CameraMetadata &jpegMetadata,
11480 camera3_capture_request_t *request)
11481{
11482 camera_metadata_t *resultMetadata;
11483 CameraMetadata camMetadata;
11484 camMetadata = request->settings;
11485
11486 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11487 int32_t thumbnail_size[2];
11488 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11489 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11490 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11491 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11492 }
11493
11494 if (request->input_buffer != NULL) {
11495 uint8_t reprocessFlags = 1;
11496 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11497 (uint8_t*)&reprocessFlags,
11498 sizeof(reprocessFlags));
11499 }
11500
11501 resultMetadata = camMetadata.release();
11502 return resultMetadata;
11503}
11504
11505/*===========================================================================
11506 * FUNCTION : setHalFpsRange
11507 *
11508 * DESCRIPTION: set FPS range parameter
11509 *
11510 *
11511 * PARAMETERS :
11512 * @settings : Metadata from framework
11513 * @hal_metadata: Metadata buffer
11514 *
11515 *
11516 * RETURN : success: NO_ERROR
11517 * failure: non-zero error code
11518 *==========================================================================*/
11519int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11520 metadata_buffer_t *hal_metadata)
11521{
11522 int32_t rc = NO_ERROR;
11523 cam_fps_range_t fps_range;
11524 fps_range.min_fps = (float)
11525 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11526 fps_range.max_fps = (float)
11527 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11528 fps_range.video_min_fps = fps_range.min_fps;
11529 fps_range.video_max_fps = fps_range.max_fps;
11530
11531 LOGD("aeTargetFpsRange fps: [%f %f]",
11532 fps_range.min_fps, fps_range.max_fps);
11533 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11534 * follows:
11535 * ---------------------------------------------------------------|
11536 * Video stream is absent in configure_streams |
11537 * (Camcorder preview before the first video record) |
11538 * ---------------------------------------------------------------|
11539 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11540 * | | | vid_min/max_fps|
11541 * ---------------------------------------------------------------|
11542 * NO | [ 30, 240] | 240 | [240, 240] |
11543 * |-------------|-------------|----------------|
11544 * | [240, 240] | 240 | [240, 240] |
11545 * ---------------------------------------------------------------|
11546 * Video stream is present in configure_streams |
11547 * ---------------------------------------------------------------|
11548 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11549 * | | | vid_min/max_fps|
11550 * ---------------------------------------------------------------|
11551 * NO | [ 30, 240] | 240 | [240, 240] |
11552 * (camcorder prev |-------------|-------------|----------------|
11553 * after video rec | [240, 240] | 240 | [240, 240] |
11554 * is stopped) | | | |
11555 * ---------------------------------------------------------------|
11556 * YES | [ 30, 240] | 240 | [240, 240] |
11557 * |-------------|-------------|----------------|
11558 * | [240, 240] | 240 | [240, 240] |
11559 * ---------------------------------------------------------------|
11560 * When Video stream is absent in configure_streams,
11561 * preview fps = sensor_fps / batchsize
11562 * Eg: for 240fps at batchSize 4, preview = 60fps
11563 * for 120fps at batchSize 4, preview = 30fps
11564 *
11565 * When video stream is present in configure_streams, preview fps is as per
11566 * the ratio of preview buffers to video buffers requested in process
11567 * capture request
11568 */
11569 mBatchSize = 0;
11570 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11571 fps_range.min_fps = fps_range.video_max_fps;
11572 fps_range.video_min_fps = fps_range.video_max_fps;
11573 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11574 fps_range.max_fps);
11575 if (NAME_NOT_FOUND != val) {
11576 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11578 return BAD_VALUE;
11579 }
11580
11581 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11582 /* If batchmode is currently in progress and the fps changes,
11583 * set the flag to restart the sensor */
11584 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11585 (mHFRVideoFps != fps_range.max_fps)) {
11586 mNeedSensorRestart = true;
11587 }
11588 mHFRVideoFps = fps_range.max_fps;
11589 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11590 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11591 mBatchSize = MAX_HFR_BATCH_SIZE;
11592 }
11593 }
11594 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11595
11596 }
11597 } else {
11598 /* HFR mode is session param in backend/ISP. This should be reset when
11599 * in non-HFR mode */
11600 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11602 return BAD_VALUE;
11603 }
11604 }
11605 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11606 return BAD_VALUE;
11607 }
11608 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11609 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11610 return rc;
11611}
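/* Editor's note: an illustrative walk-through of the constrained high-speed
 * path above, assuming PREVIEW_FPS_FOR_HFR == 30 and MAX_HFR_BATCH_SIZE >= 8
 * (the real values are defined elsewhere in the HAL): an aeTargetFpsRange of
 * [240, 240] maps to hfrMode 240, forces both fps_range and the video fps to
 * [240, 240], and yields mBatchSize = 240 / 30 = 8, so frames are delivered
 * in batches of eight as described in the table above.
 */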
11612
11613/*===========================================================================
11614 * FUNCTION : translateToHalMetadata
11615 *
11616 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11617 *
11618 *
11619 * PARAMETERS :
11620 * @request : request sent from framework
 * @hal_metadata : HAL metadata buffer to be populated
 * @snapshotStreamId : stream ID of the snapshot stream
11621 *
11622 *
11623 * RETURN : success: NO_ERROR
11624 * failure: non-zero error code
11625 *==========================================================================*/
11626int QCamera3HardwareInterface::translateToHalMetadata
11627 (const camera3_capture_request_t *request,
11628 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011629 uint32_t snapshotStreamId) {
11630 if (request == nullptr || hal_metadata == nullptr) {
11631 return BAD_VALUE;
11632 }
11633
11634 int64_t minFrameDuration = getMinFrameDuration(request);
11635
11636 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11637 minFrameDuration);
11638}
11639
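/*===========================================================================
 * FUNCTION   : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: read the framework camera_metadata_t settings and translate
 *              them into the HAL metadata_buffer_t representation
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings metadata from the framework request
 *   @hal_metadata      : HAL metadata buffer to be populated
 *   @snapshotStreamId  : stream ID of the snapshot stream
 *   @minFrameDuration  : minimum frame duration for this request
 *
 * RETURN     : success: NO_ERROR
 *              failure: non-zero error code
 *==========================================================================*/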
11640int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11641 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11642 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11643
Thierry Strudel3d639192016-09-09 11:52:26 -070011644 int rc = 0;
11645 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011646 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011647
11648 /* Do not change the order of the following list unless you know what you are
11649 * doing.
11650 * The order is laid out in such a way that parameters in the front of the table
11651 * may be used to override the parameters later in the table. Examples are:
11652 * 1. META_MODE should precede AEC/AWB/AF MODE
11653 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11654 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11655 * 4. Any mode should precede its corresponding settings
11656 */
11657 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11658 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11660 rc = BAD_VALUE;
11661 }
11662 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11663 if (rc != NO_ERROR) {
11664 LOGE("extractSceneMode failed");
11665 }
11666 }
11667
11668 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11669 uint8_t fwk_aeMode =
11670 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11671 uint8_t aeMode;
11672 int32_t redeye;
11673
11674 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11675 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011676 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11677 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 } else {
11679 aeMode = CAM_AE_MODE_ON;
11680 }
11681 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11682 redeye = 1;
11683 } else {
11684 redeye = 0;
11685 }
11686
11687 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11688 fwk_aeMode);
11689 if (NAME_NOT_FOUND != val) {
11690 int32_t flashMode = (int32_t)val;
11691 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11692 }
11693
11694 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11696 rc = BAD_VALUE;
11697 }
11698 }
11699
11700 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11701 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11702 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11703 fwk_whiteLevel);
11704 if (NAME_NOT_FOUND != val) {
11705 uint8_t whiteLevel = (uint8_t)val;
11706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11707 rc = BAD_VALUE;
11708 }
11709 }
11710 }
11711
11712 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11713 uint8_t fwk_cacMode =
11714 frame_settings.find(
11715 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11716 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11717 fwk_cacMode);
11718 if (NAME_NOT_FOUND != val) {
11719 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11720 bool entryAvailable = FALSE;
11721 // Check whether the framework-set CAC mode is supported by the device or not
11722 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11723 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11724 entryAvailable = TRUE;
11725 break;
11726 }
11727 }
11728 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11729 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
11730 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST by ISP
11731 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11732 if (entryAvailable == FALSE) {
11733 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11734 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11735 } else {
11736 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11737 // High is not supported, so set FAST, as the spec says the underlying
11738 // device implementation can be the same for both modes.
11739 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11740 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11741 // Fast is not supported, so neither HIGH nor FAST can be set; choose OFF
11742 // in order to avoid the fps drop due to high quality
11743 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11744 } else {
11745 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11746 }
11747 }
11748 }
11749 LOGD("Final cacMode is %d", cacMode);
11750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11751 rc = BAD_VALUE;
11752 }
11753 } else {
11754 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11755 }
11756 }
11757
Thierry Strudel2896d122017-02-23 19:18:03 -080011758 char af_value[PROPERTY_VALUE_MAX];
11759 property_get("persist.camera.af.infinity", af_value, "0");
11760
Jason Lee84ae9972017-02-24 13:24:24 -080011761 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011762 if (atoi(af_value) == 0) {
11763 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011764 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011765 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11766 fwk_focusMode);
11767 if (NAME_NOT_FOUND != val) {
11768 uint8_t focusMode = (uint8_t)val;
11769 LOGD("set focus mode %d", focusMode);
11770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11771 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11772 rc = BAD_VALUE;
11773 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011774 }
11775 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011776 } else {
11777 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11778 LOGE("Focus forced to infinity %d", focusMode);
11779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11780 rc = BAD_VALUE;
11781 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011782 }
11783
Jason Lee84ae9972017-02-24 13:24:24 -080011784 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11785 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011786 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11788 focalDistance)) {
11789 rc = BAD_VALUE;
11790 }
11791 }
11792
11793 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11794 uint8_t fwk_antibandingMode =
11795 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11796 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11797 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11798 if (NAME_NOT_FOUND != val) {
11799 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011800 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11801 if (m60HzZone) {
11802 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11803 } else {
11804 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11805 }
11806 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011807 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11808 hal_antibandingMode)) {
11809 rc = BAD_VALUE;
11810 }
11811 }
11812 }
11813
11814 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11815 int32_t expCompensation = frame_settings.find(
11816 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11817 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11818 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11819 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11820 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011821 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011822 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11823 expCompensation)) {
11824 rc = BAD_VALUE;
11825 }
11826 }
11827
11828 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11829 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11830 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11831 rc = BAD_VALUE;
11832 }
11833 }
11834 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11835 rc = setHalFpsRange(frame_settings, hal_metadata);
11836 if (rc != NO_ERROR) {
11837 LOGE("setHalFpsRange failed");
11838 }
11839 }
11840
11841 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11842 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11844 rc = BAD_VALUE;
11845 }
11846 }
11847
11848 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11849 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11850 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11851 fwk_effectMode);
11852 if (NAME_NOT_FOUND != val) {
11853 uint8_t effectMode = (uint8_t)val;
11854 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11855 rc = BAD_VALUE;
11856 }
11857 }
11858 }
11859
11860 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11861 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11863 colorCorrectMode)) {
11864 rc = BAD_VALUE;
11865 }
11866 }
11867
11868 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11869 cam_color_correct_gains_t colorCorrectGains;
11870 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11871 colorCorrectGains.gains[i] =
11872 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11873 }
11874 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11875 colorCorrectGains)) {
11876 rc = BAD_VALUE;
11877 }
11878 }
11879
11880 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11881 cam_color_correct_matrix_t colorCorrectTransform;
11882 cam_rational_type_t transform_elem;
11883 size_t num = 0;
11884 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11885 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11886 transform_elem.numerator =
11887 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11888 transform_elem.denominator =
11889 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11890 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11891 num++;
11892 }
11893 }
11894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11895 colorCorrectTransform)) {
11896 rc = BAD_VALUE;
11897 }
11898 }
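    /* Hedged sketch of the flattening above: the framework supplies
     * android.colorCorrection.transform as nine rationals in row-major order,
     * so matrix element (i, j) comes from index i * CC_MATRIX_COLS + j.
     * "settings" is an assumed framework-side CameraMetadata object.
     *
     *     camera_metadata_rational_t identity[9] = {
     *         {1,1},{0,1},{0,1},  {0,1},{1,1},{0,1},  {0,1},{0,1},{1,1}};
     *     settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, identity, 9);
     *     // After the loop, transform_matrix[1][1] holds {1,1} and every
     *     // off-diagonal entry holds {0,1}.
     */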
11899
11900 cam_trigger_t aecTrigger;
11901 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11902 aecTrigger.trigger_id = -1;
11903 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11904 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11905 aecTrigger.trigger =
11906 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11907 aecTrigger.trigger_id =
11908 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11910 aecTrigger)) {
11911 rc = BAD_VALUE;
11912 }
11913 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11914 aecTrigger.trigger, aecTrigger.trigger_id);
11915 }
11916
11917 /*af_trigger must come with a trigger id*/
11918 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11919 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11920 cam_trigger_t af_trigger;
11921 af_trigger.trigger =
11922 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11923 af_trigger.trigger_id =
11924 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11926 rc = BAD_VALUE;
11927 }
11928 LOGD("AfTrigger: %d AfTriggerID: %d",
11929 af_trigger.trigger, af_trigger.trigger_id);
11930 }
11931
11932 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11933 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
11934 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
11935 rc = BAD_VALUE;
11936 }
11937 }
11938 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
11939 cam_edge_application_t edge_application;
11940 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080011941
Thierry Strudel3d639192016-09-09 11:52:26 -070011942 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
11943 edge_application.sharpness = 0;
11944 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080011945 edge_application.sharpness =
11946 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
11947 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
11948 int32_t sharpness =
11949 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
11950 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
11951 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
11952 LOGD("Setting edge mode sharpness %d", sharpness);
11953 edge_application.sharpness = sharpness;
11954 }
11955 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011956 }
11957 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
11958 rc = BAD_VALUE;
11959 }
11960 }
11961
11962 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11963 int32_t respectFlashMode = 1;
11964 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11965 uint8_t fwk_aeMode =
11966 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011967 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
11968 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
11969 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011970 respectFlashMode = 0;
11971 LOGH("AE Mode controls flash, ignore android.flash.mode");
11972 }
11973 }
11974 if (respectFlashMode) {
11975 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11976 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11977 LOGH("flash mode after mapping %d", val);
11978 // To check: CAM_INTF_META_FLASH_MODE usage
11979 if (NAME_NOT_FOUND != val) {
11980 uint8_t flashMode = (uint8_t)val;
11981 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
11982 rc = BAD_VALUE;
11983 }
11984 }
11985 }
11986 }
11987
11988 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
11989 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
11990 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
11991 rc = BAD_VALUE;
11992 }
11993 }
11994
11995 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
11996 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
11997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
11998 flashFiringTime)) {
11999 rc = BAD_VALUE;
12000 }
12001 }
12002
12003 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12004 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12005 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12006 hotPixelMode)) {
12007 rc = BAD_VALUE;
12008 }
12009 }
12010
12011 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12012 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12014 lensAperture)) {
12015 rc = BAD_VALUE;
12016 }
12017 }
12018
12019 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12020 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12021 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12022 filterDensity)) {
12023 rc = BAD_VALUE;
12024 }
12025 }
12026
12027 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12028 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12030 focalLength)) {
12031 rc = BAD_VALUE;
12032 }
12033 }
12034
12035 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12036 uint8_t optStabMode =
12037 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12038 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12039 optStabMode)) {
12040 rc = BAD_VALUE;
12041 }
12042 }
12043
12044 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12045 uint8_t videoStabMode =
12046 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12047 LOGD("videoStabMode from APP = %d", videoStabMode);
12048 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12049 videoStabMode)) {
12050 rc = BAD_VALUE;
12051 }
12052 }
12053
12054
12055 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12056 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12057 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12058 noiseRedMode)) {
12059 rc = BAD_VALUE;
12060 }
12061 }
12062
12063 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12064 float reprocessEffectiveExposureFactor =
12065 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12067 reprocessEffectiveExposureFactor)) {
12068 rc = BAD_VALUE;
12069 }
12070 }
12071
12072 cam_crop_region_t scalerCropRegion;
12073 bool scalerCropSet = false;
12074 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12075 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12076 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12077 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12078 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12079
12080 // Map coordinate system from active array to sensor output.
12081 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12082 scalerCropRegion.width, scalerCropRegion.height);
12083
12084 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12085 scalerCropRegion)) {
12086 rc = BAD_VALUE;
12087 }
12088 scalerCropSet = true;
12089 }
12090
12091 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12092 int64_t sensorExpTime =
12093 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12094 LOGD("setting sensorExpTime %lld", sensorExpTime);
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12096 sensorExpTime)) {
12097 rc = BAD_VALUE;
12098 }
12099 }
12100
12101 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12102 int64_t sensorFrameDuration =
12103 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012104 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12105 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12106 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12107 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12109 sensorFrameDuration)) {
12110 rc = BAD_VALUE;
12111 }
12112 }
12113
12114 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12115 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12116 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12117 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12118 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12119 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12120 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12122 sensorSensitivity)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012127#ifndef USE_HAL_3_3
12128 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12129 int32_t ispSensitivity =
12130 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12131 if (ispSensitivity <
12132 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12133 ispSensitivity =
12134 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12135 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12136 }
12137 if (ispSensitivity >
12138 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12139 ispSensitivity =
12140 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12141 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12142 }
12143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12144 ispSensitivity)) {
12145 rc = BAD_VALUE;
12146 }
12147 }
12148#endif
12149
Thierry Strudel3d639192016-09-09 11:52:26 -070012150 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12151 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12153 rc = BAD_VALUE;
12154 }
12155 }
12156
12157 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12158 uint8_t fwk_facedetectMode =
12159 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12160
12161 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12162 fwk_facedetectMode);
12163
12164 if (NAME_NOT_FOUND != val) {
12165 uint8_t facedetectMode = (uint8_t)val;
12166 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12167 facedetectMode)) {
12168 rc = BAD_VALUE;
12169 }
12170 }
12171 }
12172
Thierry Strudel54dc9782017-02-15 12:12:10 -080012173 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012174 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012175 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12177 histogramMode)) {
12178 rc = BAD_VALUE;
12179 }
12180 }
12181
12182 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12183 uint8_t sharpnessMapMode =
12184 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12186 sharpnessMapMode)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12192 uint8_t tonemapMode =
12193 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12199 /*All tonemap channels will have the same number of points*/
12200 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12201 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12202 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12203 cam_rgb_tonemap_curves tonemapCurves;
12204 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12205 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12206 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12207 tonemapCurves.tonemap_points_cnt,
12208 CAM_MAX_TONEMAP_CURVE_SIZE);
12209 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12210 }
12211
12212 /* ch0 = G*/
12213 size_t point = 0;
12214 cam_tonemap_curve_t tonemapCurveGreen;
12215 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12216 for (size_t j = 0; j < 2; j++) {
12217 tonemapCurveGreen.tonemap_points[i][j] =
12218 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12219 point++;
12220 }
12221 }
12222 tonemapCurves.curves[0] = tonemapCurveGreen;
12223
12224 /* ch 1 = B */
12225 point = 0;
12226 cam_tonemap_curve_t tonemapCurveBlue;
12227 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12228 for (size_t j = 0; j < 2; j++) {
12229 tonemapCurveBlue.tonemap_points[i][j] =
12230 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12231 point++;
12232 }
12233 }
12234 tonemapCurves.curves[1] = tonemapCurveBlue;
12235
12236 /* ch 2 = R */
12237 point = 0;
12238 cam_tonemap_curve_t tonemapCurveRed;
12239 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12240 for (size_t j = 0; j < 2; j++) {
12241 tonemapCurveRed.tonemap_points[i][j] =
12242 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12243 point++;
12244 }
12245 }
12246 tonemapCurves.curves[2] = tonemapCurveRed;
12247
12248 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12249 tonemapCurves)) {
12250 rc = BAD_VALUE;
12251 }
12252 }
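    /* Hedged sketch: each ANDROID_TONEMAP_CURVE_* array is a flat list of
     * (Pin, Pout) pairs, which is why tonemap_points_cnt above is count / 2
     * and why all three channels are expected to carry the same point count.
     * "settings" is an assumed framework-side CameraMetadata object.
     *
     *     float linear[] = {0.0f, 0.0f, 1.0f, 1.0f};   // two (in, out) points
     *     settings.update(ANDROID_TONEMAP_CURVE_RED,   linear, 4);
     *     settings.update(ANDROID_TONEMAP_CURVE_GREEN, linear, 4);
     *     settings.update(ANDROID_TONEMAP_CURVE_BLUE,  linear, 4);
     *     // -> tonemap_points_cnt == 2; curves[0] = G, curves[1] = B, curves[2] = R
     */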
12253
12254 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12255 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12257 captureIntent)) {
12258 rc = BAD_VALUE;
12259 }
12260 }
12261
12262 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12263 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12265 blackLevelLock)) {
12266 rc = BAD_VALUE;
12267 }
12268 }
12269
12270 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12271 uint8_t lensShadingMapMode =
12272 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12274 lensShadingMapMode)) {
12275 rc = BAD_VALUE;
12276 }
12277 }
12278
12279 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12280 cam_area_t roi;
12281 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012282 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012283
12284 // Map coordinate system from active array to sensor output.
12285 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12286 roi.rect.height);
12287
12288 if (scalerCropSet) {
12289 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12290 }
12291 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12292 rc = BAD_VALUE;
12293 }
12294 }
12295
12296 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12297 cam_area_t roi;
12298 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012299 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012300
12301 // Map coordinate system from active array to sensor output.
12302 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12303 roi.rect.height);
12304
12305 if (scalerCropSet) {
12306 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12307 }
12308 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12309 rc = BAD_VALUE;
12310 }
12311 }
12312
12313 // CDS for non-HFR non-video mode
12314 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12315 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12316 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12317 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12318 LOGE("Invalid CDS mode %d!", *fwk_cds);
12319 } else {
12320 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12321 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12322 rc = BAD_VALUE;
12323 }
12324 }
12325 }
12326
Thierry Strudel04e026f2016-10-10 11:27:36 -070012327 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012328 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012329 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012330 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12331 }
12332 if (m_bVideoHdrEnabled)
12333 vhdr = CAM_VIDEO_HDR_MODE_ON;
12334
Thierry Strudel54dc9782017-02-15 12:12:10 -080012335 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12336
12337 if(vhdr != curr_hdr_state)
12338 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12339
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012340 rc = setVideoHdrMode(mParameters, vhdr);
12341 if (rc != NO_ERROR) {
12342 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012343 }
12344
12345 //IR
12346 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12347 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12348 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012349 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12350 uint8_t isIRon = 0;
12351
12352        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012353 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12354 LOGE("Invalid IR mode %d!", fwk_ir);
12355 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012356 if(isIRon != curr_ir_state )
12357 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12358
Thierry Strudel04e026f2016-10-10 11:27:36 -070012359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12360 CAM_INTF_META_IR_MODE, fwk_ir)) {
12361 rc = BAD_VALUE;
12362 }
12363 }
12364 }
12365
Thierry Strudel54dc9782017-02-15 12:12:10 -080012366 //Binning Correction Mode
12367 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12368 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12369 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12370 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12371 || (0 > fwk_binning_correction)) {
12372 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12373 } else {
12374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12375 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12376 rc = BAD_VALUE;
12377 }
12378 }
12379 }
12380
Thierry Strudel269c81a2016-10-12 12:13:59 -070012381 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12382 float aec_speed;
12383 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12384 LOGD("AEC Speed :%f", aec_speed);
12385 if ( aec_speed < 0 ) {
12386 LOGE("Invalid AEC mode %f!", aec_speed);
12387 } else {
12388 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12389 aec_speed)) {
12390 rc = BAD_VALUE;
12391 }
12392 }
12393 }
12394
12395 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12396 float awb_speed;
12397 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12398 LOGD("AWB Speed :%f", awb_speed);
12399 if ( awb_speed < 0 ) {
12400 LOGE("Invalid AWB mode %f!", awb_speed);
12401 } else {
12402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12403 awb_speed)) {
12404 rc = BAD_VALUE;
12405 }
12406 }
12407 }
12408
Thierry Strudel3d639192016-09-09 11:52:26 -070012409 // TNR
12410 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12411 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12412 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012413 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012414 cam_denoise_param_t tnr;
12415 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12416 tnr.process_plates =
12417 (cam_denoise_process_type_t)frame_settings.find(
12418 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12419 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012420
12421 if(b_TnrRequested != curr_tnr_state)
12422 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12423
Thierry Strudel3d639192016-09-09 11:52:26 -070012424 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12425 rc = BAD_VALUE;
12426 }
12427 }
12428
Thierry Strudel54dc9782017-02-15 12:12:10 -080012429 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012430 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012431 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12433 *exposure_metering_mode)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437
Thierry Strudel3d639192016-09-09 11:52:26 -070012438 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12439 int32_t fwk_testPatternMode =
12440 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12441 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12442 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12443
12444 if (NAME_NOT_FOUND != testPatternMode) {
12445 cam_test_pattern_data_t testPatternData;
12446 memset(&testPatternData, 0, sizeof(testPatternData));
12447 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12448 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12449 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12450 int32_t *fwk_testPatternData =
12451 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12452 testPatternData.r = fwk_testPatternData[0];
12453 testPatternData.b = fwk_testPatternData[3];
12454 switch (gCamCapability[mCameraId]->color_arrangement) {
12455 case CAM_FILTER_ARRANGEMENT_RGGB:
12456 case CAM_FILTER_ARRANGEMENT_GRBG:
12457 testPatternData.gr = fwk_testPatternData[1];
12458 testPatternData.gb = fwk_testPatternData[2];
12459 break;
12460 case CAM_FILTER_ARRANGEMENT_GBRG:
12461 case CAM_FILTER_ARRANGEMENT_BGGR:
12462 testPatternData.gr = fwk_testPatternData[2];
12463 testPatternData.gb = fwk_testPatternData[1];
12464 break;
12465 default:
12466 LOGE("color arrangement %d is not supported",
12467 gCamCapability[mCameraId]->color_arrangement);
12468 break;
12469 }
12470 }
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12472 testPatternData)) {
12473 rc = BAD_VALUE;
12474 }
12475 } else {
12476 LOGE("Invalid framework sensor test pattern mode %d",
12477 fwk_testPatternMode);
12478 }
12479 }
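    /* Hedged sketch: for SOLID_COLOR the four android.sensor.testPatternData
     * values map to R (index 0), Gr/Gb (indices 1 and 2, swapped for GBRG/BGGR
     * sensors) and B (index 3), exactly as handled above. "settings" is an
     * assumed framework-side CameraMetadata object.
     *
     *     int32_t solidGreen[4] = {0, -1, -1, 0};   // -1 = all bits set
     *     int32_t mode = ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR;
     *     settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &mode, 1);
     *     settings.update(ANDROID_SENSOR_TEST_PATTERN_DATA, solidGreen, 4);
     */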
12480
12481 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12482 size_t count = 0;
12483 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12484 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12485 gps_coords.data.d, gps_coords.count, count);
12486 if (gps_coords.count != count) {
12487 rc = BAD_VALUE;
12488 }
12489 }
12490
12491 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12492 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12493 size_t count = 0;
12494 const char *gps_methods_src = (const char *)
12495 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12496 memset(gps_methods, '\0', sizeof(gps_methods));
12497 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12498 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12499 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12500 if (GPS_PROCESSING_METHOD_SIZE != count) {
12501 rc = BAD_VALUE;
12502 }
12503 }
12504
12505 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12506 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12507 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12508 gps_timestamp)) {
12509 rc = BAD_VALUE;
12510 }
12511 }
12512
12513 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12514 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12515 cam_rotation_info_t rotation_info;
12516 if (orientation == 0) {
12517 rotation_info.rotation = ROTATE_0;
12518 } else if (orientation == 90) {
12519 rotation_info.rotation = ROTATE_90;
12520 } else if (orientation == 180) {
12521 rotation_info.rotation = ROTATE_180;
12522 } else if (orientation == 270) {
12523 rotation_info.rotation = ROTATE_270;
12524        } else {
            // Defensive default: android.jpeg.orientation should only be 0/90/180/270.
            rotation_info.rotation = ROTATE_0;
        }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012525 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012526 rotation_info.streamId = snapshotStreamId;
12527 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12528 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12529 rc = BAD_VALUE;
12530 }
12531 }
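    /* Hedged sketch: a request with android.jpeg.orientation = 90 results in
     * both CAM_INTF_META_JPEG_ORIENTATION (the raw 90) and a
     * CAM_INTF_PARM_ROTATION entry of {ROTATE_90, device_rotation = ROTATE_0,
     * streamId = snapshotStreamId}, as set up above.
     *
     *     int32_t jpegOrientation = 90;
     *     settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
     */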
12532
12533 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12534 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12535 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12536 rc = BAD_VALUE;
12537 }
12538 }
12539
12540 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12541 uint32_t thumb_quality = (uint32_t)
12542 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12543 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12544 thumb_quality)) {
12545 rc = BAD_VALUE;
12546 }
12547 }
12548
12549 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12550 cam_dimension_t dim;
12551 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12552 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12553 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12554 rc = BAD_VALUE;
12555 }
12556 }
12557
12558 // Internal metadata
12559 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12560 size_t count = 0;
12561 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12562 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12563 privatedata.data.i32, privatedata.count, count);
12564 if (privatedata.count != count) {
12565 rc = BAD_VALUE;
12566 }
12567 }
12568
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012569 // ISO/Exposure Priority
12570 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12571 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12572 cam_priority_mode_t mode =
12573 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12574 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12575 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12576 use_iso_exp_pty.previewOnly = FALSE;
12577 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12578 use_iso_exp_pty.value = *ptr;
12579
12580 if(CAM_ISO_PRIORITY == mode) {
12581 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12582 use_iso_exp_pty)) {
12583 rc = BAD_VALUE;
12584 }
12585 }
12586 else {
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12588 use_iso_exp_pty)) {
12589 rc = BAD_VALUE;
12590 }
12591 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012592
12593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12594 rc = BAD_VALUE;
12595 }
12596 }
12597 } else {
12598 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12599 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012600 }
12601 }
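    /* Hedged sketch of the vendor priority path above. The tag data types
     * follow the find() calls (SELECT_PRIORITY is int32, USE_ISO_EXP_PRIORITY
     * is int64); the concrete priority and ISO values below are assumptions
     * for illustration only.
     *
     *     int32_t priority = CAM_ISO_PRIORITY;     // value from the vendor tag enum
     *     int64_t isoValue = 800;
     *     settings.update(QCAMERA3_SELECT_PRIORITY, &priority, 1);
     *     settings.update(QCAMERA3_USE_ISO_EXP_PRIORITY, &isoValue, 1);
     *     // -> CAM_INTF_PARM_ISO carries the value and CAM_INTF_PARM_ZSL_MODE
     *     //    is forced to 1; with neither tag present, ZSL_MODE is set to 0.
     */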
12602
12603 // Saturation
12604 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12605 int32_t* use_saturation =
12606 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12607 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12608 rc = BAD_VALUE;
12609 }
12610 }
12611
Thierry Strudel3d639192016-09-09 11:52:26 -070012612 // EV step
12613 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12614 gCamCapability[mCameraId]->exp_compensation_step)) {
12615 rc = BAD_VALUE;
12616 }
12617
12618 // CDS info
12619 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12620 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12621 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12622
12623 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12624 CAM_INTF_META_CDS_DATA, *cdsData)) {
12625 rc = BAD_VALUE;
12626 }
12627 }
12628
Shuzhen Wang19463d72016-03-08 11:09:52 -080012629 // Hybrid AE
12630 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12631 uint8_t *hybrid_ae = (uint8_t *)
12632 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12633
12634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12635 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12636 rc = BAD_VALUE;
12637 }
12638 }
12639
Shuzhen Wang14415f52016-11-16 18:26:18 -080012640 // Histogram
12641 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12642 uint8_t histogramMode =
12643 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12645 histogramMode)) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649
12650 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12651 int32_t histogramBins =
12652 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12654 histogramBins)) {
12655 rc = BAD_VALUE;
12656 }
12657 }
12658
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012659 // Tracking AF
12660 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12661 uint8_t trackingAfTrigger =
12662 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12663 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12664 trackingAfTrigger)) {
12665 rc = BAD_VALUE;
12666 }
12667 }
12668
Thierry Strudel3d639192016-09-09 11:52:26 -070012669 return rc;
12670}
12671
12672/*===========================================================================
12673 * FUNCTION : captureResultCb
12674 *
12675 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12676 *
12677 * PARAMETERS :
12678 * @frame : frame information from mm-camera-interface
12679 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12680 * @userdata: userdata
12681 *
12682 * RETURN : NONE
12683 *==========================================================================*/
12684void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12685 camera3_stream_buffer_t *buffer,
12686 uint32_t frame_number, bool isInputBuffer, void *userdata)
12687{
12688 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12689 if (hw == NULL) {
12690 LOGE("Invalid hw %p", hw);
12691 return;
12692 }
12693
12694 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12695 return;
12696}
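/* The static wrapper above is the usual C-callback trampoline: channels are
 * constructed with this function plus the QCamera3HardwareInterface instance
 * as userdata (see addOfflineReprocChannel further below), so the wrapper only
 * recovers the instance and forwards. Rough call chain, hedged:
 *
 *     new QCamera3ReprocessChannel(..., captureResultCb, setBufferErrorStatus,
 *                                  ..., this, inputChHandle);
 *     // channel invokes: captureResultCb(meta, buf, frameNum, isInput, userdata)
 *     // wrapper forwards: hw->captureResultCb(meta, buf, frameNum, isInput)
 */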
12697
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012698/*===========================================================================
12699 * FUNCTION : setBufferErrorStatus
12700 *
12701 * DESCRIPTION: Callback handler for channels to report any buffer errors
12702 *
12703 * PARAMETERS :
12704 * @ch : Channel on which buffer error is reported from
12705 * @frame_number : frame number on which buffer error is reported on
12706 * @buffer_status : buffer error status
12707 * @userdata: userdata
12708 *
12709 * RETURN : NONE
12710 *==========================================================================*/
12711void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12712 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12713{
12714 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12715 if (hw == NULL) {
12716 LOGE("Invalid hw %p", hw);
12717 return;
12718 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012719
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012720 hw->setBufferErrorStatus(ch, frame_number, err);
12721 return;
12722}
12723
12724void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12725 uint32_t frameNumber, camera3_buffer_status_t err)
12726{
12727 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12728 pthread_mutex_lock(&mMutex);
12729
12730 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12731 if (req.frame_number != frameNumber)
12732 continue;
12733 for (auto& k : req.mPendingBufferList) {
12734 if(k.stream->priv == ch) {
12735 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12736 }
12737 }
12738 }
12739
12740 pthread_mutex_unlock(&mMutex);
12741 return;
12742}
Thierry Strudel3d639192016-09-09 11:52:26 -070012743/*===========================================================================
12744 * FUNCTION : initialize
12745 *
12746 * DESCRIPTION: Pass framework callback pointers to HAL
12747 *
12748 * PARAMETERS :
12749 *
12750 *
12751 * RETURN : Success : 0
12752 * Failure: -ENODEV
12753 *==========================================================================*/
12754
12755int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12756 const camera3_callback_ops_t *callback_ops)
12757{
12758 LOGD("E");
12759 QCamera3HardwareInterface *hw =
12760 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12761 if (!hw) {
12762 LOGE("NULL camera device");
12763 return -ENODEV;
12764 }
12765
12766 int rc = hw->initialize(callback_ops);
12767 LOGD("X");
12768 return rc;
12769}
12770
12771/*===========================================================================
12772 * FUNCTION : configure_streams
12773 *
12774 * DESCRIPTION:
12775 *
12776 * PARAMETERS :
12777 *
12778 *
12779 * RETURN : Success: 0
12780 * Failure: -EINVAL (if stream configuration is invalid)
12781 * -ENODEV (fatal error)
12782 *==========================================================================*/
12783
12784int QCamera3HardwareInterface::configure_streams(
12785 const struct camera3_device *device,
12786 camera3_stream_configuration_t *stream_list)
12787{
12788 LOGD("E");
12789 QCamera3HardwareInterface *hw =
12790 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12791 if (!hw) {
12792 LOGE("NULL camera device");
12793 return -ENODEV;
12794 }
12795 int rc = hw->configureStreams(stream_list);
12796 LOGD("X");
12797 return rc;
12798}
12799
12800/*===========================================================================
12801 * FUNCTION : construct_default_request_settings
12802 *
12803 * DESCRIPTION: Configure a settings buffer to meet the required use case
12804 *
12805 * PARAMETERS :
12806 *
12807 *
12808 * RETURN : Success: Return valid metadata
12809 * Failure: Return NULL
12810 *==========================================================================*/
12811const camera_metadata_t* QCamera3HardwareInterface::
12812 construct_default_request_settings(const struct camera3_device *device,
12813 int type)
12814{
12815
12816 LOGD("E");
12817 camera_metadata_t* fwk_metadata = NULL;
12818 QCamera3HardwareInterface *hw =
12819 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12820 if (!hw) {
12821 LOGE("NULL camera device");
12822 return NULL;
12823 }
12824
12825 fwk_metadata = hw->translateCapabilityToMetadata(type);
12826
12827 LOGD("X");
12828 return fwk_metadata;
12829}
12830
12831/*===========================================================================
12832 * FUNCTION : process_capture_request
12833 *
12834 * DESCRIPTION:
12835 *
12836 * PARAMETERS :
12837 *
12838 *
12839 * RETURN :
12840 *==========================================================================*/
12841int QCamera3HardwareInterface::process_capture_request(
12842 const struct camera3_device *device,
12843 camera3_capture_request_t *request)
12844{
12845 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012846 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012847 QCamera3HardwareInterface *hw =
12848 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12849 if (!hw) {
12850 LOGE("NULL camera device");
12851 return -EINVAL;
12852 }
12853
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012854 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012855 LOGD("X");
12856 return rc;
12857}
12858
12859/*===========================================================================
12860 * FUNCTION : dump
12861 *
12862 * DESCRIPTION:
12863 *
12864 * PARAMETERS :
12865 *
12866 *
12867 * RETURN :
12868 *==========================================================================*/
12869
12870void QCamera3HardwareInterface::dump(
12871 const struct camera3_device *device, int fd)
12872{
12873 /* Log level property is read when "adb shell dumpsys media.camera" is
12874 called so that the log level can be controlled without restarting
12875 the media server */
12876 getLogLevel();
12877
12878 LOGD("E");
12879 QCamera3HardwareInterface *hw =
12880 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12881 if (!hw) {
12882 LOGE("NULL camera device");
12883 return;
12884 }
12885
12886 hw->dump(fd);
12887 LOGD("X");
12888 return;
12889}
12890
12891/*===========================================================================
12892 * FUNCTION : flush
12893 *
12894 * DESCRIPTION:
12895 *
12896 * PARAMETERS :
12897 *
12898 *
12899 * RETURN :
12900 *==========================================================================*/
12901
12902int QCamera3HardwareInterface::flush(
12903 const struct camera3_device *device)
12904{
12905 int rc;
12906 LOGD("E");
12907 QCamera3HardwareInterface *hw =
12908 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12909 if (!hw) {
12910 LOGE("NULL camera device");
12911 return -EINVAL;
12912 }
12913
12914 pthread_mutex_lock(&hw->mMutex);
12915 // Validate current state
12916 switch (hw->mState) {
12917 case STARTED:
12918 /* valid state */
12919 break;
12920
12921 case ERROR:
12922 pthread_mutex_unlock(&hw->mMutex);
12923 hw->handleCameraDeviceError();
12924 return -ENODEV;
12925
12926 default:
12927 LOGI("Flush returned during state %d", hw->mState);
12928 pthread_mutex_unlock(&hw->mMutex);
12929 return 0;
12930 }
12931 pthread_mutex_unlock(&hw->mMutex);
12932
12933 rc = hw->flush(true /* restart channels */ );
12934 LOGD("X");
12935 return rc;
12936}
12937
12938/*===========================================================================
12939 * FUNCTION : close_camera_device
12940 *
12941 * DESCRIPTION:
12942 *
12943 * PARAMETERS :
12944 *
12945 *
12946 * RETURN :
12947 *==========================================================================*/
12948int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
12949{
12950 int ret = NO_ERROR;
12951 QCamera3HardwareInterface *hw =
12952 reinterpret_cast<QCamera3HardwareInterface *>(
12953 reinterpret_cast<camera3_device_t *>(device)->priv);
12954 if (!hw) {
12955 LOGE("NULL camera device");
12956 return BAD_VALUE;
12957 }
12958
12959 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
12960 delete hw;
12961 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012962 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070012963 return ret;
12964}
12965
12966/*===========================================================================
12967 * FUNCTION : getWaveletDenoiseProcessPlate
12968 *
12969 * DESCRIPTION: query wavelet denoise process plate
12970 *
12971 * PARAMETERS : None
12972 *
12973 * RETURN     : WNR process plate value
12974 *==========================================================================*/
12975cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
12976{
12977 char prop[PROPERTY_VALUE_MAX];
12978 memset(prop, 0, sizeof(prop));
12979 property_get("persist.denoise.process.plates", prop, "0");
12980 int processPlate = atoi(prop);
12981 switch(processPlate) {
12982 case 0:
12983 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
12984 case 1:
12985 return CAM_WAVELET_DENOISE_CBCR_ONLY;
12986 case 2:
12987 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12988 case 3:
12989 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
12990 default:
12991 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
12992 }
12993}
12994
12995
12996/*===========================================================================
12997 * FUNCTION : getTemporalDenoiseProcessPlate
12998 *
12999 * DESCRIPTION: query temporal denoise process plate
13000 *
13001 * PARAMETERS : None
13002 *
13003 * RETURN     : TNR process plate value
13004 *==========================================================================*/
13005cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13006{
13007 char prop[PROPERTY_VALUE_MAX];
13008 memset(prop, 0, sizeof(prop));
13009 property_get("persist.tnr.process.plates", prop, "0");
13010 int processPlate = atoi(prop);
13011 switch(processPlate) {
13012 case 0:
13013 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13014 case 1:
13015 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13016 case 2:
13017 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13018 case 3:
13019 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13020 default:
13021 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13022 }
13023}
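/* Both plate getters above share the same numeric mapping and differ only in
 * the property they read. Developer-only sketch (values taken from the switch
 * statements):
 *
 *     adb shell setprop persist.denoise.process.plates 1   // WNR: CbCr only
 *     adb shell setprop persist.tnr.process.plates 2       // TNR: streamlined YCbCr
 *     // 0 (or unset) -> YCbCr plane, 1 -> CbCr only, 2 -> streamlined YCbCr,
 *     // 3 -> streamlined CbCr; any other value falls back to streamlined YCbCr.
 */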
13024
13025
13026/*===========================================================================
13027 * FUNCTION : extractSceneMode
13028 *
13029 * DESCRIPTION: Extract scene mode from frameworks set metadata
13030 *
13031 * PARAMETERS :
13032 * @frame_settings: CameraMetadata reference
13033 *   @metaMode: ANDROID_CONTROL_MODE
13034 * @hal_metadata: hal metadata structure
13035 *
13036 * RETURN     : NO_ERROR on success, error code otherwise
13037 *==========================================================================*/
13038int32_t QCamera3HardwareInterface::extractSceneMode(
13039 const CameraMetadata &frame_settings, uint8_t metaMode,
13040 metadata_buffer_t *hal_metadata)
13041{
13042 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013043 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13044
13045 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13046 LOGD("Ignoring control mode OFF_KEEP_STATE");
13047 return NO_ERROR;
13048 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013049
13050 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13051 camera_metadata_ro_entry entry =
13052 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13053 if (0 == entry.count)
13054 return rc;
13055
13056 uint8_t fwk_sceneMode = entry.data.u8[0];
13057
13058 int val = lookupHalName(SCENE_MODES_MAP,
13059 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13060 fwk_sceneMode);
13061 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013062 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013063 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013064 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013065 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013066
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013067 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13068 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13069 }
13070
13071 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13072 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013073 cam_hdr_param_t hdr_params;
13074 hdr_params.hdr_enable = 1;
13075 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13076 hdr_params.hdr_need_1x = false;
13077 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13078 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13079 rc = BAD_VALUE;
13080 }
13081 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013082
Thierry Strudel3d639192016-09-09 11:52:26 -070013083 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13084 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13085 rc = BAD_VALUE;
13086 }
13087 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013088
13089 if (mForceHdrSnapshot) {
13090 cam_hdr_param_t hdr_params;
13091 hdr_params.hdr_enable = 1;
13092 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13093 hdr_params.hdr_need_1x = false;
13094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13095 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13096 rc = BAD_VALUE;
13097 }
13098 }
13099
Thierry Strudel3d639192016-09-09 11:52:26 -070013100 return rc;
13101}
13102
13103/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013104 * FUNCTION : setVideoHdrMode
13105 *
13106 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13107 *
13108 * PARAMETERS :
13109 * @hal_metadata: hal metadata structure
13110 *   @vhdr: video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13111 *
13112 * RETURN     : NO_ERROR on success, BAD_VALUE for an invalid mode
13113 *==========================================================================*/
13114int32_t QCamera3HardwareInterface::setVideoHdrMode(
13115 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13116{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013117 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13118 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13119 }
13120
13121 LOGE("Invalid Video HDR mode %d!", vhdr);
13122 return BAD_VALUE;
13123}
13124
13125/*===========================================================================
13126 * FUNCTION : setSensorHDR
13127 *
13128 * DESCRIPTION: Enable/disable sensor HDR.
13129 *
13130 * PARAMETERS :
13131 * @hal_metadata: hal metadata structure
13132 * @enable: boolean whether to enable/disable sensor HDR
13133 *
13134 * RETURN     : NO_ERROR on success, error code otherwise
13135 *==========================================================================*/
13136int32_t QCamera3HardwareInterface::setSensorHDR(
13137 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13138{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013139 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013140 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13141
13142 if (enable) {
13143 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13144 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13145 #ifdef _LE_CAMERA_
13146 //Default to staggered HDR for IOT
13147 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13148 #else
13149 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13150 #endif
13151 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13152 }
13153
13154 bool isSupported = false;
13155 switch (sensor_hdr) {
13156 case CAM_SENSOR_HDR_IN_SENSOR:
13157 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13158 CAM_QCOM_FEATURE_SENSOR_HDR) {
13159 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013160 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013161 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013162 break;
13163 case CAM_SENSOR_HDR_ZIGZAG:
13164 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13165 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13166 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013167 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013168 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013169 break;
13170 case CAM_SENSOR_HDR_STAGGERED:
13171 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13172 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13173 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013174 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013175 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013176 break;
13177 case CAM_SENSOR_HDR_OFF:
13178 isSupported = true;
13179 LOGD("Turning off sensor HDR");
13180 break;
13181 default:
13182 LOGE("HDR mode %d not supported", sensor_hdr);
13183 rc = BAD_VALUE;
13184 break;
13185 }
13186
13187 if(isSupported) {
13188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13189 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13190 rc = BAD_VALUE;
13191 } else {
13192 if(!isVideoHdrEnable)
13193 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013194 }
13195 }
13196 return rc;
13197}
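/* Hedged sketch: when HDR is being enabled, the sensor HDR type comes straight
 * from a property whose integer value is cast to cam_sensor_hdr_type_t ("3"
 * selects staggered HDR, matching the IOT default above; the other numeric
 * values follow the enum definition in cam_types.h -- an assumption here).
 *
 *     adb shell setprop persist.camera.sensor.hdr 3   // request staggered HDR
 *     // A recognized type that the capability mask does not advertise is
 *     // silently skipped; an unrecognized value returns BAD_VALUE.
 */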
13198
13199/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013200 * FUNCTION : needRotationReprocess
13201 *
13202 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13203 *
13204 * PARAMETERS : none
13205 *
13206 * RETURN : true: needed
13207 * false: no need
13208 *==========================================================================*/
13209bool QCamera3HardwareInterface::needRotationReprocess()
13210{
13211 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13212        // pp has the capability to process rotation
13213 LOGH("need do reprocess for rotation");
13214 return true;
13215 }
13216
13217 return false;
13218}
13219
13220/*===========================================================================
13221 * FUNCTION : needReprocess
13222 *
13223 * DESCRIPTION: if reprocess is needed
13224 *
13225 * PARAMETERS : none
13226 *
13227 * RETURN : true: needed
13228 * false: no need
13229 *==========================================================================*/
13230bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13231{
13232 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13233 // TODO: add for ZSL HDR later
13234 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13235 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13236 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13237 return true;
13238 } else {
13239 LOGH("already post processed frame");
13240 return false;
13241 }
13242 }
13243 return needRotationReprocess();
13244}
13245
13246/*===========================================================================
13247 * FUNCTION : needJpegExifRotation
13248 *
 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13250 *
13251 * PARAMETERS : none
13252 *
13253 * RETURN : true: needed
13254 * false: no need
13255 *==========================================================================*/
13256bool QCamera3HardwareInterface::needJpegExifRotation()
13257{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013258 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013259 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        LOGD("Need to use JPEG EXIF rotation");
13261 return true;
13262 }
13263 return false;
13264}
13265
13266/*===========================================================================
13267 * FUNCTION : addOfflineReprocChannel
13268 *
13269 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13270 * coming from input channel
13271 *
13272 * PARAMETERS :
13273 * @config : reprocess configuration
13274 * @inputChHandle : pointer to the input (source) channel
13275 *
13276 *
13277 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13278 *==========================================================================*/
13279QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13280 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13281{
13282 int32_t rc = NO_ERROR;
13283 QCamera3ReprocessChannel *pChannel = NULL;
13284
13285 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013286 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13287 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013288 if (NULL == pChannel) {
13289 LOGE("no mem for reprocess channel");
13290 return NULL;
13291 }
13292
13293 rc = pChannel->initialize(IS_TYPE_NONE);
13294 if (rc != NO_ERROR) {
13295 LOGE("init reprocess channel failed, ret = %d", rc);
13296 delete pChannel;
13297 return NULL;
13298 }
13299
13300 // pp feature config
13301 cam_pp_feature_config_t pp_config;
13302 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13303
13304 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13305 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13306 & CAM_QCOM_FEATURE_DSDN) {
        // Use CPP CDS in case h/w supports it.
13308 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13309 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13310 }
13311 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13312 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13313 }
13314
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013315 if (config.hdr_param.hdr_enable) {
13316 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13317 pp_config.hdr_param = config.hdr_param;
13318 }
13319
13320 if (mForceHdrSnapshot) {
13321 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13322 pp_config.hdr_param.hdr_enable = 1;
13323 pp_config.hdr_param.hdr_need_1x = 0;
13324 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13325 }
13326
Thierry Strudel3d639192016-09-09 11:52:26 -070013327 rc = pChannel->addReprocStreamsFromSource(pp_config,
13328 config,
13329 IS_TYPE_NONE,
13330 mMetadataChannel);
13331
13332 if (rc != NO_ERROR) {
13333 delete pChannel;
13334 return NULL;
13335 }
13336 return pChannel;
13337}
13338
13339/*===========================================================================
13340 * FUNCTION : getMobicatMask
13341 *
13342 * DESCRIPTION: returns mobicat mask
13343 *
13344 * PARAMETERS : none
13345 *
13346 * RETURN : mobicat mask
13347 *
13348 *==========================================================================*/
13349uint8_t QCamera3HardwareInterface::getMobicatMask()
13350{
13351 return m_MobicatMask;
13352}
13353
13354/*===========================================================================
13355 * FUNCTION : setMobicat
13356 *
13357 * DESCRIPTION: set Mobicat on/off.
13358 *
13359 * PARAMETERS :
13360 * @params : none
13361 *
13362 * RETURN : int32_t type of status
13363 * NO_ERROR -- success
 *              non-zero failure code
13365 *==========================================================================*/
13366int32_t QCamera3HardwareInterface::setMobicat()
13367{
13368 char value [PROPERTY_VALUE_MAX];
13369 property_get("persist.camera.mobicat", value, "0");
13370 int32_t ret = NO_ERROR;
13371 uint8_t enableMobi = (uint8_t)atoi(value);
13372
13373 if (enableMobi) {
13374 tune_cmd_t tune_cmd;
13375 tune_cmd.type = SET_RELOAD_CHROMATIX;
13376 tune_cmd.module = MODULE_ALL;
13377 tune_cmd.value = TRUE;
13378 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13379 CAM_INTF_PARM_SET_VFE_COMMAND,
13380 tune_cmd);
13381
13382 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13383 CAM_INTF_PARM_SET_PP_COMMAND,
13384 tune_cmd);
13385 }
13386 m_MobicatMask = enableMobi;
13387
13388 return ret;
13389}
13390
13391/*===========================================================================
13392* FUNCTION : getLogLevel
13393*
13394* DESCRIPTION: Reads the log level property into a variable
13395*
13396* PARAMETERS :
13397* None
13398*
13399* RETURN :
13400* None
13401*==========================================================================*/
13402void QCamera3HardwareInterface::getLogLevel()
13403{
13404 char prop[PROPERTY_VALUE_MAX];
13405 uint32_t globalLogLevel = 0;
13406
13407 property_get("persist.camera.hal.debug", prop, "0");
13408 int val = atoi(prop);
13409 if (0 <= val) {
13410 gCamHal3LogLevel = (uint32_t)val;
13411 }
13412
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013413 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013414 gKpiDebugLevel = atoi(prop);
13415
13416 property_get("persist.camera.global.debug", prop, "0");
13417 val = atoi(prop);
13418 if (0 <= val) {
13419 globalLogLevel = (uint32_t)val;
13420 }
13421
13422 /* Highest log level among hal.logs and global.logs is selected */
13423 if (gCamHal3LogLevel < globalLogLevel)
13424 gCamHal3LogLevel = globalLogLevel;
13425
13426 return;
13427}
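
/*
 * For reference, the debug levels read above can be tuned at runtime via the
 * corresponding system properties, e.g. (illustrative commands):
 *
 *   adb shell setprop persist.camera.hal.debug 3
 *   adb shell setprop persist.camera.global.debug 2
 *
 * The effective HAL log level is the higher of the two values.
 */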
13428
13429/*===========================================================================
13430 * FUNCTION : validateStreamRotations
13431 *
13432 * DESCRIPTION: Check if the rotations requested are supported
13433 *
13434 * PARAMETERS :
13435 * @stream_list : streams to be configured
13436 *
13437 * RETURN : NO_ERROR on success
13438 * -EINVAL on failure
13439 *
13440 *==========================================================================*/
13441int QCamera3HardwareInterface::validateStreamRotations(
13442 camera3_stream_configuration_t *streamList)
13443{
13444 int rc = NO_ERROR;
13445
13446 /*
13447 * Loop through all streams requested in configuration
13448 * Check if unsupported rotations have been requested on any of them
13449 */
13450 for (size_t j = 0; j < streamList->num_streams; j++){
13451 camera3_stream_t *newStream = streamList->streams[j];
13452
13453 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13454 bool isImplDef = (newStream->format ==
13455 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13456 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13457 isImplDef);
13458
13459 if (isRotated && (!isImplDef || isZsl)) {
            LOGE("Error: Unsupported rotation of %d requested for stream "
                    "type:%d and stream format:%d",
13462 newStream->rotation, newStream->stream_type,
13463 newStream->format);
13464 rc = -EINVAL;
13465 break;
13466 }
13467 }
13468
13469 return rc;
13470}
13471
13472/*===========================================================================
13473* FUNCTION : getFlashInfo
13474*
13475* DESCRIPTION: Retrieve information about whether the device has a flash.
13476*
13477* PARAMETERS :
13478* @cameraId : Camera id to query
13479* @hasFlash : Boolean indicating whether there is a flash device
13480* associated with given camera
13481* @flashNode : If a flash device exists, this will be its device node.
13482*
13483* RETURN :
13484* None
13485*==========================================================================*/
13486void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13487 bool& hasFlash,
13488 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13489{
13490 cam_capability_t* camCapability = gCamCapability[cameraId];
13491 if (NULL == camCapability) {
13492 hasFlash = false;
13493 flashNode[0] = '\0';
13494 } else {
13495 hasFlash = camCapability->flash_available;
13496 strlcpy(flashNode,
13497 (char*)camCapability->flash_dev_name,
13498 QCAMERA_MAX_FILEPATH_LENGTH);
13499 }
13500}
13501
13502/*===========================================================================
13503* FUNCTION : getEepromVersionInfo
13504*
13505* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13506*
13507* PARAMETERS : None
13508*
13509* RETURN : string describing EEPROM version
13510* "\0" if no such info available
13511*==========================================================================*/
13512const char *QCamera3HardwareInterface::getEepromVersionInfo()
13513{
13514 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13515}
13516
13517/*===========================================================================
13518* FUNCTION : getLdafCalib
13519*
13520* DESCRIPTION: Retrieve Laser AF calibration data
13521*
13522* PARAMETERS : None
13523*
13524* RETURN : Two uint32_t describing laser AF calibration data
13525* NULL if none is available.
13526*==========================================================================*/
13527const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13528{
13529 if (mLdafCalibExist) {
13530 return &mLdafCalib[0];
13531 } else {
13532 return NULL;
13533 }
13534}
13535
13536/*===========================================================================
13537 * FUNCTION : dynamicUpdateMetaStreamInfo
13538 *
13539 * DESCRIPTION: This function:
13540 * (1) stops all the channels
13541 * (2) returns error on pending requests and buffers
13542 * (3) sends metastream_info in setparams
13543 * (4) starts all channels
13544 * This is useful when sensor has to be restarted to apply any
13545 * settings such as frame rate from a different sensor mode
13546 *
13547 * PARAMETERS : None
13548 *
13549 * RETURN : NO_ERROR on success
13550 * Error codes on failure
13551 *
13552 *==========================================================================*/
13553int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13554{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013555 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013556 int rc = NO_ERROR;
13557
13558 LOGD("E");
13559
13560 rc = stopAllChannels();
13561 if (rc < 0) {
13562 LOGE("stopAllChannels failed");
13563 return rc;
13564 }
13565
13566 rc = notifyErrorForPendingRequests();
13567 if (rc < 0) {
13568 LOGE("notifyErrorForPendingRequests failed");
13569 return rc;
13570 }
13571
13572 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx"
                " Format:%d",
13575 mStreamConfigInfo.type[i],
13576 mStreamConfigInfo.stream_sizes[i].width,
13577 mStreamConfigInfo.stream_sizes[i].height,
13578 mStreamConfigInfo.postprocess_mask[i],
13579 mStreamConfigInfo.format[i]);
13580 }
13581
13582 /* Send meta stream info once again so that ISP can start */
13583 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13584 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13585 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13586 mParameters);
13587 if (rc < 0) {
        LOGE("Setting meta stream info failed. Sensor mode will not change");
13589 }
13590
13591 rc = startAllChannels();
13592 if (rc < 0) {
13593 LOGE("startAllChannels failed");
13594 return rc;
13595 }
13596
13597 LOGD("X");
13598 return rc;
13599}
13600
13601/*===========================================================================
13602 * FUNCTION : stopAllChannels
13603 *
13604 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13605 *
13606 * PARAMETERS : None
13607 *
13608 * RETURN : NO_ERROR on success
13609 * Error codes on failure
13610 *
13611 *==========================================================================*/
13612int32_t QCamera3HardwareInterface::stopAllChannels()
13613{
13614 int32_t rc = NO_ERROR;
13615
13616 LOGD("Stopping all channels");
13617 // Stop the Streams/Channels
13618 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13619 it != mStreamInfo.end(); it++) {
13620 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13621 if (channel) {
13622 channel->stop();
13623 }
13624 (*it)->status = INVALID;
13625 }
13626
13627 if (mSupportChannel) {
13628 mSupportChannel->stop();
13629 }
13630 if (mAnalysisChannel) {
13631 mAnalysisChannel->stop();
13632 }
13633 if (mRawDumpChannel) {
13634 mRawDumpChannel->stop();
13635 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013636 if (mHdrPlusRawSrcChannel) {
13637 mHdrPlusRawSrcChannel->stop();
13638 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013639 if (mMetadataChannel) {
13640 /* If content of mStreamInfo is not 0, there is metadata stream */
13641 mMetadataChannel->stop();
13642 }
13643
13644 LOGD("All channels stopped");
13645 return rc;
13646}
13647
13648/*===========================================================================
13649 * FUNCTION : startAllChannels
13650 *
13651 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13652 *
13653 * PARAMETERS : None
13654 *
13655 * RETURN : NO_ERROR on success
13656 * Error codes on failure
13657 *
13658 *==========================================================================*/
13659int32_t QCamera3HardwareInterface::startAllChannels()
13660{
13661 int32_t rc = NO_ERROR;
13662
13663 LOGD("Start all channels ");
13664 // Start the Streams/Channels
13665 if (mMetadataChannel) {
13666 /* If content of mStreamInfo is not 0, there is metadata stream */
13667 rc = mMetadataChannel->start();
13668 if (rc < 0) {
13669 LOGE("META channel start failed");
13670 return rc;
13671 }
13672 }
13673 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13674 it != mStreamInfo.end(); it++) {
13675 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13676 if (channel) {
13677 rc = channel->start();
13678 if (rc < 0) {
13679 LOGE("channel start failed");
13680 return rc;
13681 }
13682 }
13683 }
13684 if (mAnalysisChannel) {
13685 mAnalysisChannel->start();
13686 }
13687 if (mSupportChannel) {
13688 rc = mSupportChannel->start();
13689 if (rc < 0) {
13690 LOGE("Support channel start failed");
13691 return rc;
13692 }
13693 }
13694 if (mRawDumpChannel) {
13695 rc = mRawDumpChannel->start();
13696 if (rc < 0) {
13697 LOGE("RAW dump channel start failed");
13698 return rc;
13699 }
13700 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013701 if (mHdrPlusRawSrcChannel) {
13702 rc = mHdrPlusRawSrcChannel->start();
13703 if (rc < 0) {
13704 LOGE("HDR+ RAW channel start failed");
13705 return rc;
13706 }
13707 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013708
13709 LOGD("All channels started");
13710 return rc;
13711}
13712
13713/*===========================================================================
13714 * FUNCTION : notifyErrorForPendingRequests
13715 *
13716 * DESCRIPTION: This function sends error for all the pending requests/buffers
13717 *
13718 * PARAMETERS : None
13719 *
13720 * RETURN : Error codes
13721 * NO_ERROR on success
13722 *
13723 *==========================================================================*/
13724int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13725{
13726 int32_t rc = NO_ERROR;
13727 unsigned int frameNum = 0;
13728 camera3_capture_result_t result;
13729 camera3_stream_buffer_t *pStream_Buf = NULL;
13730
13731 memset(&result, 0, sizeof(camera3_capture_result_t));
13732
13733 if (mPendingRequestsList.size() > 0) {
13734 pendingRequestIterator i = mPendingRequestsList.begin();
13735 frameNum = i->frame_number;
13736 } else {
13737 /* There might still be pending buffers even though there are
13738 no pending requests. Setting the frameNum to MAX so that
13739 all the buffers with smaller frame numbers are returned */
13740 frameNum = UINT_MAX;
13741 }
13742
13743 LOGH("Oldest frame num on mPendingRequestsList = %u",
13744 frameNum);
13745
Emilian Peev7650c122017-01-19 08:24:33 -080013746 notifyErrorFoPendingDepthData(mDepthChannel);
13747
Thierry Strudel3d639192016-09-09 11:52:26 -070013748 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13749 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13750
13751 if (req->frame_number < frameNum) {
13752 // Send Error notify to frameworks for each buffer for which
13753 // metadata buffer is already sent
13754 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13755 req->frame_number, req->mPendingBufferList.size());
13756
13757 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13758 if (NULL == pStream_Buf) {
13759 LOGE("No memory for pending buffers array");
13760 return NO_MEMORY;
13761 }
13762 memset(pStream_Buf, 0,
13763 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13764 result.result = NULL;
13765 result.frame_number = req->frame_number;
13766 result.num_output_buffers = req->mPendingBufferList.size();
13767 result.output_buffers = pStream_Buf;
13768
13769 size_t index = 0;
13770 for (auto info = req->mPendingBufferList.begin();
13771 info != req->mPendingBufferList.end(); ) {
13772
13773 camera3_notify_msg_t notify_msg;
13774 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13775 notify_msg.type = CAMERA3_MSG_ERROR;
13776 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13777 notify_msg.message.error.error_stream = info->stream;
13778 notify_msg.message.error.frame_number = req->frame_number;
13779 pStream_Buf[index].acquire_fence = -1;
13780 pStream_Buf[index].release_fence = -1;
13781 pStream_Buf[index].buffer = info->buffer;
13782 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13783 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013784 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013785 index++;
13786 // Remove buffer from list
13787 info = req->mPendingBufferList.erase(info);
13788 }
13789
13790 // Remove this request from Map
13791 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13792 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13793 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13794
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013795 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013796
13797 delete [] pStream_Buf;
13798 } else {
13799
13800 // Go through the pending requests info and send error request to framework
13801 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13802
13803 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13804
13805 // Send error notify to frameworks
13806 camera3_notify_msg_t notify_msg;
13807 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13808 notify_msg.type = CAMERA3_MSG_ERROR;
13809 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13810 notify_msg.message.error.error_stream = NULL;
13811 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013812 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013813
13814 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13815 if (NULL == pStream_Buf) {
13816 LOGE("No memory for pending buffers array");
13817 return NO_MEMORY;
13818 }
13819 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13820
13821 result.result = NULL;
13822 result.frame_number = req->frame_number;
13823 result.input_buffer = i->input_buffer;
13824 result.num_output_buffers = req->mPendingBufferList.size();
13825 result.output_buffers = pStream_Buf;
13826
13827 size_t index = 0;
13828 for (auto info = req->mPendingBufferList.begin();
13829 info != req->mPendingBufferList.end(); ) {
13830 pStream_Buf[index].acquire_fence = -1;
13831 pStream_Buf[index].release_fence = -1;
13832 pStream_Buf[index].buffer = info->buffer;
13833 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13834 pStream_Buf[index].stream = info->stream;
13835 index++;
13836 // Remove buffer from list
13837 info = req->mPendingBufferList.erase(info);
13838 }
13839
13840 // Remove this request from Map
13841 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13842 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13843 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13844
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013845 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013846 delete [] pStream_Buf;
13847 i = erasePendingRequest(i);
13848 }
13849 }
13850
13851 /* Reset pending frame Drop list and requests list */
13852 mPendingFrameDropList.clear();
13853
13854 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13855 req.mPendingBufferList.clear();
13856 }
13857 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013858 LOGH("Cleared all the pending buffers ");
13859
13860 return rc;
13861}
13862
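/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream of the given dimensions is treated as
 *              an encoder (rather than viewfinder) stream, i.e. it exceeds
 *              the maximum viewfinder size or the 4K video dimensions.
 *
 * PARAMETERS :
 *   @max_viewfinder_size : maximum viewfinder dimensions
 *   @width               : stream width
 *   @height              : stream height
 *
 * RETURN     : true if the stream falls on the encoder path
 *              false otherwise
 *==========================================================================*/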
13863bool QCamera3HardwareInterface::isOnEncoder(
13864 const cam_dimension_t max_viewfinder_size,
13865 uint32_t width, uint32_t height)
13866{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013867 return ((width > (uint32_t)max_viewfinder_size.width) ||
13868 (height > (uint32_t)max_viewfinder_size.height) ||
13869 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13870 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013871}
13872
13873/*===========================================================================
13874 * FUNCTION : setBundleInfo
13875 *
13876 * DESCRIPTION: Set bundle info for all streams that are bundle.
13877 *
13878 * PARAMETERS : None
13879 *
13880 * RETURN : NO_ERROR on success
13881 * Error codes on failure
13882 *==========================================================================*/
13883int32_t QCamera3HardwareInterface::setBundleInfo()
13884{
13885 int32_t rc = NO_ERROR;
13886
13887 if (mChannelHandle) {
13888 cam_bundle_config_t bundleInfo;
13889 memset(&bundleInfo, 0, sizeof(bundleInfo));
13890 rc = mCameraHandle->ops->get_bundle_info(
13891 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13892 if (rc != NO_ERROR) {
13893 LOGE("get_bundle_info failed");
13894 return rc;
13895 }
13896 if (mAnalysisChannel) {
13897 mAnalysisChannel->setBundleInfo(bundleInfo);
13898 }
13899 if (mSupportChannel) {
13900 mSupportChannel->setBundleInfo(bundleInfo);
13901 }
13902 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13903 it != mStreamInfo.end(); it++) {
13904 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13905 channel->setBundleInfo(bundleInfo);
13906 }
13907 if (mRawDumpChannel) {
13908 mRawDumpChannel->setBundleInfo(bundleInfo);
13909 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013910 if (mHdrPlusRawSrcChannel) {
13911 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13912 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013913 }
13914
13915 return rc;
13916}
13917
13918/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013919 * FUNCTION : setInstantAEC
13920 *
13921 * DESCRIPTION: Set Instant AEC related params.
13922 *
13923 * PARAMETERS :
13924 * @meta: CameraMetadata reference
13925 *
13926 * RETURN : NO_ERROR on success
13927 * Error codes on failure
13928 *==========================================================================*/
13929int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13930{
13931 int32_t rc = NO_ERROR;
13932 uint8_t val = 0;
13933 char prop[PROPERTY_VALUE_MAX];
13934
13935 // First try to configure instant AEC from framework metadata
13936 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
13937 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
13938 }
13939
13940 // If framework did not set this value, try to read from set prop.
13941 if (val == 0) {
13942 memset(prop, 0, sizeof(prop));
13943 property_get("persist.camera.instant.aec", prop, "0");
13944 val = (uint8_t)atoi(prop);
13945 }
13946
13947 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
13948 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
13949 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
13950 mInstantAEC = val;
13951 mInstantAECSettledFrameNumber = 0;
13952 mInstantAecFrameIdxCount = 0;
13953 LOGH("instantAEC value set %d",val);
13954 if (mInstantAEC) {
13955 memset(prop, 0, sizeof(prop));
13956 property_get("persist.camera.ae.instant.bound", prop, "10");
13957 int32_t aec_frame_skip_cnt = atoi(prop);
13958 if (aec_frame_skip_cnt >= 0) {
13959 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
13960 } else {
13961 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
13962 rc = BAD_VALUE;
13963 }
13964 }
13965 } else {
13966 LOGE("Bad instant aec value set %d", val);
13967 rc = BAD_VALUE;
13968 }
13969 return rc;
13970}
13971
13972/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013973 * FUNCTION : get_num_overall_buffers
13974 *
13975 * DESCRIPTION: Estimate number of pending buffers across all requests.
13976 *
13977 * PARAMETERS : None
13978 *
13979 * RETURN : Number of overall pending buffers
13980 *
13981 *==========================================================================*/
13982uint32_t PendingBuffersMap::get_num_overall_buffers()
13983{
13984 uint32_t sum_buffers = 0;
13985 for (auto &req : mPendingBuffersInRequest) {
13986 sum_buffers += req.mPendingBufferList.size();
13987 }
13988 return sum_buffers;
13989}
13990
13991/*===========================================================================
13992 * FUNCTION : removeBuf
13993 *
13994 * DESCRIPTION: Remove a matching buffer from tracker.
13995 *
13996 * PARAMETERS : @buffer: image buffer for the callback
13997 *
13998 * RETURN : None
13999 *
14000 *==========================================================================*/
14001void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14002{
14003 bool buffer_found = false;
14004 for (auto req = mPendingBuffersInRequest.begin();
14005 req != mPendingBuffersInRequest.end(); req++) {
14006 for (auto k = req->mPendingBufferList.begin();
14007 k != req->mPendingBufferList.end(); k++ ) {
14008 if (k->buffer == buffer) {
14009 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14010 req->frame_number, buffer);
14011 k = req->mPendingBufferList.erase(k);
14012 if (req->mPendingBufferList.empty()) {
14013 // Remove this request from Map
14014 req = mPendingBuffersInRequest.erase(req);
14015 }
14016 buffer_found = true;
14017 break;
14018 }
14019 }
14020 if (buffer_found) {
14021 break;
14022 }
14023 }
14024 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14025 get_num_overall_buffers());
14026}
14027
14028/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014029 * FUNCTION : getBufErrStatus
14030 *
14031 * DESCRIPTION: get buffer error status
14032 *
14033 * PARAMETERS : @buffer: buffer handle
14034 *
14035 * RETURN : Error status
14036 *
14037 *==========================================================================*/
14038int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14039{
14040 for (auto& req : mPendingBuffersInRequest) {
14041 for (auto& k : req.mPendingBufferList) {
14042 if (k.buffer == buffer)
14043 return k.bufStatus;
14044 }
14045 }
14046 return CAMERA3_BUFFER_STATUS_OK;
14047}
14048
14049/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014050 * FUNCTION : setPAAFSupport
14051 *
14052 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14053 * feature mask according to stream type and filter
14054 * arrangement
14055 *
14056 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14057 * @stream_type: stream type
14058 * @filter_arrangement: filter arrangement
14059 *
14060 * RETURN : None
14061 *==========================================================================*/
14062void QCamera3HardwareInterface::setPAAFSupport(
14063 cam_feature_mask_t& feature_mask,
14064 cam_stream_type_t stream_type,
14065 cam_color_filter_arrangement_t filter_arrangement)
14066{
Thierry Strudel3d639192016-09-09 11:52:26 -070014067 switch (filter_arrangement) {
14068 case CAM_FILTER_ARRANGEMENT_RGGB:
14069 case CAM_FILTER_ARRANGEMENT_GRBG:
14070 case CAM_FILTER_ARRANGEMENT_GBRG:
14071 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014072 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14073 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014074 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014075 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14076 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014077 }
14078 break;
14079 case CAM_FILTER_ARRANGEMENT_Y:
14080 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14081 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14082 }
14083 break;
14084 default:
14085 break;
14086 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014087 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14088 feature_mask, stream_type, filter_arrangement);
14089
14090
Thierry Strudel3d639192016-09-09 11:52:26 -070014091}
14092
14093/*===========================================================================
14094* FUNCTION : getSensorMountAngle
14095*
14096* DESCRIPTION: Retrieve sensor mount angle
14097*
14098* PARAMETERS : None
14099*
14100* RETURN : sensor mount angle in uint32_t
14101*==========================================================================*/
14102uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14103{
14104 return gCamCapability[mCameraId]->sensor_mount_angle;
14105}
14106
14107/*===========================================================================
14108* FUNCTION : getRelatedCalibrationData
14109*
14110* DESCRIPTION: Retrieve related system calibration data
14111*
14112* PARAMETERS : None
14113*
14114* RETURN : Pointer of related system calibration data
14115*==========================================================================*/
14116const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14117{
14118 return (const cam_related_system_calibration_data_t *)
14119 &(gCamCapability[mCameraId]->related_cam_calibration);
14120}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014121
14122/*===========================================================================
14123 * FUNCTION : is60HzZone
14124 *
14125 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
14126 *
14127 * PARAMETERS : None
14128 *
14129 * RETURN : True if in 60Hz zone, False otherwise
14130 *==========================================================================*/
14131bool QCamera3HardwareInterface::is60HzZone()
14132{
14133 time_t t = time(NULL);
14134 struct tm lt;
14135
14136 struct tm* r = localtime_r(&t, &lt);
14137
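    // Coarse heuristic based on the local UTC offset: offsets of -2 hours or
    // less and +8 hours or more are treated as 60Hz zones, anything in
    // between as 50Hz. If local time cannot be determined, assume 60Hz.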
    return (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60);
14142}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014143
14144/*===========================================================================
14145 * FUNCTION : adjustBlackLevelForCFA
14146 *
14147 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14148 * of bayer CFA (Color Filter Array).
14149 *
14150 * PARAMETERS : @input: black level pattern in the order of RGGB
14151 * @output: black level pattern in the order of CFA
14152 * @color_arrangement: CFA color arrangement
14153 *
14154 * RETURN : None
14155 *==========================================================================*/
14156template<typename T>
14157void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14158 T input[BLACK_LEVEL_PATTERN_CNT],
14159 T output[BLACK_LEVEL_PATTERN_CNT],
14160 cam_color_filter_arrangement_t color_arrangement)
14161{
14162 switch (color_arrangement) {
14163 case CAM_FILTER_ARRANGEMENT_GRBG:
14164 output[0] = input[1];
14165 output[1] = input[0];
14166 output[2] = input[3];
14167 output[3] = input[2];
14168 break;
14169 case CAM_FILTER_ARRANGEMENT_GBRG:
14170 output[0] = input[2];
14171 output[1] = input[3];
14172 output[2] = input[0];
14173 output[3] = input[1];
14174 break;
14175 case CAM_FILTER_ARRANGEMENT_BGGR:
14176 output[0] = input[3];
14177 output[1] = input[2];
14178 output[2] = input[1];
14179 output[3] = input[0];
14180 break;
14181 case CAM_FILTER_ARRANGEMENT_RGGB:
14182 output[0] = input[0];
14183 output[1] = input[1];
14184 output[2] = input[2];
14185 output[3] = input[3];
14186 break;
14187 default:
14188 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14189 break;
14190 }
14191}
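
/*
 * Illustrative example (not called anywhere): for a GRBG sensor, an
 * RGGB-ordered black level pattern {R, Gr, Gb, B} is reordered to match the
 * CFA readout order. The sample values below are arbitrary:
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 64.5f, 64.5f, 65.0f};
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // cfa is now {64.5f, 64.0f, 65.0f, 64.5f}, i.e. {Gr, R, B, Gb}
 */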
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014192
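/*===========================================================================
 * FUNCTION   : updateHdrPlusResultMetadata
 *
 * DESCRIPTION: Update HDR+ result metadata with the JPEG related settings
 *              (GPS coordinates, processing method, timestamp, orientation,
 *              quality, thumbnail) and capture intent of the original still
 *              capture request.
 *
 * PARAMETERS :
 *   @resultMetadata : result metadata to update
 *   @settings       : HAL metadata of the original HDR+ capture request
 *
 * RETURN     : None
 *==========================================================================*/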
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014193void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14194 CameraMetadata &resultMetadata,
14195 std::shared_ptr<metadata_buffer_t> settings)
14196{
14197 if (settings == nullptr) {
14198 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14199 return;
14200 }
14201
14202 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14203 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14204 }
14205
14206 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14207 String8 str((const char *)gps_methods);
14208 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14209 }
14210
14211 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14212 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14213 }
14214
14215 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14216 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14217 }
14218
14219 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14220 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14221 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14222 }
14223
14224 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14225 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14226 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14227 }
14228
14229 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14230 int32_t fwk_thumb_size[2];
14231 fwk_thumb_size[0] = thumb_size->width;
14232 fwk_thumb_size[1] = thumb_size->height;
14233 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14234 }
14235
14236 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14237 uint8_t fwk_intent = intent[0];
14238 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14239 }
14240}
14241
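/*===========================================================================
 * FUNCTION   : trySubmittingHdrPlusRequestLocked
 *
 * DESCRIPTION: Try to submit a capture request to the HDR+ service. The
 *              request qualifies for HDR+ only if noise reduction mode and
 *              edge mode are HIGH_QUALITY and the single output buffer is a
 *              BLOB (JPEG) stream. The *Locked suffix indicates the caller
 *              is expected to hold the relevant lock.
 *
 * PARAMETERS :
 *   @hdrPlusRequest : pending HDR+ request bookkeeping, filled on success
 *   @request        : framework capture request
 *   @metadata       : capture request settings
 *
 * RETURN     : true if the request was submitted to the HDR+ service
 *              false otherwise
 *==========================================================================*/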
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014242bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14243 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14244 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014245{
14246 if (hdrPlusRequest == nullptr) return false;
14247
14248 // Check noise reduction mode is high quality.
14249 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14250 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14251 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014252 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14253 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014254 return false;
14255 }
14256
14257 // Check edge mode is high quality.
14258 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14259 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14260 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14261 return false;
14262 }
14263
14264 if (request.num_output_buffers != 1 ||
14265 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14266 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014267 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14268 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
                    request.output_buffers[i].stream->width,
                    request.output_buffers[i].stream->height,
                    request.output_buffers[i].stream->format);
14272 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014273 return false;
14274 }
14275
14276 // Get a YUV buffer from pic channel.
14277 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14278 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14279 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14280 if (res != OK) {
14281 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14282 __FUNCTION__, strerror(-res), res);
14283 return false;
14284 }
14285
14286 pbcamera::StreamBuffer buffer;
14287 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014288 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014289 buffer.data = yuvBuffer->buffer;
14290 buffer.dataSize = yuvBuffer->frame_len;
14291
14292 pbcamera::CaptureRequest pbRequest;
14293 pbRequest.id = request.frame_number;
14294 pbRequest.outputBuffers.push_back(buffer);
14295
14296 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014297 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014298 if (res != OK) {
14299 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14300 strerror(-res), res);
14301 return false;
14302 }
14303
14304 hdrPlusRequest->yuvBuffer = yuvBuffer;
14305 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14306
14307 return true;
14308}
14309
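/*===========================================================================
 * FUNCTION   : openHdrPlusClientAsyncLocked
 *
 * DESCRIPTION: Ask the Easel manager client to open an HDR+ client
 *              asynchronously, unless one is already open or being opened.
 *              onOpened() or onOpenFailed() is invoked when the open
 *              completes.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/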
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014310status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14311 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14312 return OK;
14313 }
14314
14315 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14316 if (res != OK) {
14317 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14318 strerror(-res), res);
14319 return res;
14320 }
14321 gHdrPlusClientOpening = true;
14322
14323 return OK;
14324}
14325
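/*===========================================================================
 * FUNCTION   : enableHdrPlusModeLocked
 *
 * DESCRIPTION: Enable HDR+ mode. If the HDR+ client is not open yet, an
 *              asynchronous open is started and HDR+ mode is enabled once
 *              the client is ready. Otherwise, HDR+ streams are configured
 *              and ZSL HDR+ mode is enabled so Easel starts capturing ZSL
 *              raw buffers.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/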
Chien-Yu Chenee335912017-02-09 17:53:20 -080014326status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14327{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014328 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014329
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014330 // Check if gHdrPlusClient is opened or being opened.
14331 if (gHdrPlusClient == nullptr) {
14332 if (gHdrPlusClientOpening) {
14333 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14334 return OK;
14335 }
14336
14337 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014338 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014339 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14340 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014341 return res;
14342 }
14343
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014344 // When opening HDR+ client completes, HDR+ mode will be enabled.
14345 return OK;
14346
Chien-Yu Chenee335912017-02-09 17:53:20 -080014347 }
14348
14349 // Configure stream for HDR+.
14350 res = configureHdrPlusStreamsLocked();
14351 if (res != OK) {
14352 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014353 return res;
14354 }
14355
14356 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14357 res = gHdrPlusClient->setZslHdrPlusMode(true);
14358 if (res != OK) {
14359 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014360 return res;
14361 }
14362
14363 mHdrPlusModeEnabled = true;
14364 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14365
14366 return OK;
14367}
14368
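/*===========================================================================
 * FUNCTION   : disableHdrPlusModeLocked
 *
 * DESCRIPTION: Disable ZSL HDR+ mode in the HDR+ client, if the client is
 *              open and HDR+ mode is currently enabled.
 *
 * PARAMETERS : None
 *
 * RETURN     : None
 *==========================================================================*/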
14369void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14370{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014371 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014372 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014373 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14374 if (res != OK) {
14375 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14376 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014377 }
14378
14379 mHdrPlusModeEnabled = false;
14380 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14381}
14382
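/*===========================================================================
 * FUNCTION   : configureHdrPlusStreamsLocked
 *
 * DESCRIPTION: Configure the HDR+ client streams. The RAW input comes either
 *              from the HAL RAW source channel or directly from the sensor
 *              over MIPI, and the YUV output is backed by the picture
 *              channel.
 *
 * PARAMETERS : None
 *
 * RETURN     : OK on success
 *              Error codes on failure
 *==========================================================================*/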
14383status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014384{
14385 pbcamera::InputConfiguration inputConfig;
14386 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14387 status_t res = OK;
14388
14389 // Configure HDR+ client streams.
14390 // Get input config.
14391 if (mHdrPlusRawSrcChannel) {
14392 // HDR+ input buffers will be provided by HAL.
14393 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14394 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14395 if (res != OK) {
            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14397 __FUNCTION__, strerror(-res), res);
14398 return res;
14399 }
14400
14401 inputConfig.isSensorInput = false;
14402 } else {
14403 // Sensor MIPI will send data to Easel.
14404 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014405 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014406 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14407 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14408 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14409 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14410 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14411 if (mSensorModeInfo.num_raw_bits != 10) {
14412 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14413 mSensorModeInfo.num_raw_bits);
14414 return BAD_VALUE;
14415 }
14416
14417 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014418 }
14419
14420 // Get output configurations.
14421 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014422 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014423
14424 // Easel may need to output YUV output buffers if mPictureChannel was created.
14425 pbcamera::StreamConfiguration yuvOutputConfig;
14426 if (mPictureChannel != nullptr) {
14427 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14428 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14429 if (res != OK) {
            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14431 __FUNCTION__, strerror(-res), res);
14432
14433 return res;
14434 }
14435
14436 outputStreamConfigs.push_back(yuvOutputConfig);
14437 }
14438
14439 // TODO: consider other channels for YUV output buffers.
14440
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014441 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014442 if (res != OK) {
        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14444 strerror(-res), res);
14445 return res;
14446 }
14447
14448 return OK;
14449}
14450
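/*===========================================================================
 * FUNCTION   : onOpened
 *
 * DESCRIPTION: Callback invoked when an HDR+ client has been opened
 *              asynchronously. Stores the client, sets its static metadata
 *              and enables HDR+ mode.
 *
 * PARAMETERS :
 *   @client : the newly opened HDR+ client
 *
 * RETURN     : None
 *==========================================================================*/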
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014451void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14452 if (client == nullptr) {
14453 ALOGE("%s: Opened client is null.", __FUNCTION__);
14454 return;
14455 }
14456
14457 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14458
14459 Mutex::Autolock l(gHdrPlusClientLock);
14460 gHdrPlusClient = std::move(client);
14461 gHdrPlusClientOpening = false;
14462
14463 // Set static metadata.
14464 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14465 if (res != OK) {
14466 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14467 __FUNCTION__, strerror(-res), res);
14468 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14469 gHdrPlusClient = nullptr;
14470 return;
14471 }
14472
14473 // Enable HDR+ mode.
14474 res = enableHdrPlusModeLocked();
14475 if (res != OK) {
14476 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14477 }
14478}
14479
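/*===========================================================================
 * FUNCTION   : onOpenFailed
 *
 * DESCRIPTION: Callback invoked when opening an HDR+ client asynchronously
 *              failed.
 *
 * PARAMETERS :
 *   @err : error code of the failed open
 *
 * RETURN     : None
 *==========================================================================*/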
14480void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14481 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14482 Mutex::Autolock l(gHdrPlusClientLock);
14483 gHdrPlusClientOpening = false;
14484}
14485
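/*===========================================================================
 * FUNCTION   : onCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture result is ready. The
 *              result metadata (which belongs to a ZSL buffer) is merged
 *              with the settings of the original request, the YUV output
 *              buffer is returned to the picture channel for JPEG encoding
 *              (optionally dumped to a ppm file), and the updated metadata
 *              is sent to the framework.
 *
 * PARAMETERS :
 *   @result         : HDR+ capture result
 *   @resultMetadata : result metadata of the capture
 *
 * RETURN     : None
 *==========================================================================*/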
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014486void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14487 const camera_metadata_t &resultMetadata) {
14488 if (result != nullptr) {
14489 if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
14491 result->outputBuffers.size());
14492 return;
14493 }
14494
14495 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14496 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14497 result->outputBuffers[0].streamId);
14498 return;
14499 }
14500
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014501 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014502 HdrPlusPendingRequest pendingRequest;
14503 {
14504 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14505 auto req = mHdrPlusPendingRequests.find(result->requestId);
14506 pendingRequest = req->second;
14507 }
14508
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014509 // Update the result metadata with the settings of the HDR+ still capture request because
14510 // the result metadata belongs to a ZSL buffer.
14511 CameraMetadata metadata;
14512 metadata = &resultMetadata;
14513 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14514 camera_metadata_t* updatedResultMetadata = metadata.release();
14515
14516 QCamera3PicChannel *picChannel =
14517 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14518
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014519 // Check if dumping HDR+ YUV output is enabled.
14520 char prop[PROPERTY_VALUE_MAX];
14521 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14522 bool dumpYuvOutput = atoi(prop);
14523
14524 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014525 // Dump yuv buffer to a ppm file.
14526 pbcamera::StreamConfiguration outputConfig;
14527 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14528 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14529 if (rc == OK) {
14530 char buf[FILENAME_MAX] = {};
14531 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14532 result->requestId, result->outputBuffers[0].streamId,
14533 outputConfig.image.width, outputConfig.image.height);
14534
14535 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14536 } else {
14537 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14538 __FUNCTION__, strerror(-rc), rc);
14539 }
14540 }
14541
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014542 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14543 auto halMetadata = std::make_shared<metadata_buffer_t>();
14544 clear_metadata_buffer(halMetadata.get());
14545
14546 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14547 // encoding.
14548 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14549 halStreamId, /*minFrameDuration*/0);
14550 if (res == OK) {
14551 // Return the buffer to pic channel for encoding.
14552 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14553 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14554 halMetadata);
14555 } else {
14556 // Return the buffer without encoding.
14557 // TODO: This should not happen but we may want to report an error buffer to camera
14558 // service.
14559 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14560 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14561 strerror(-res), res);
14562 }
14563
14564 // Send HDR+ metadata to framework.
14565 {
14566 pthread_mutex_lock(&mMutex);
14567
14568 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14569 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14570 pthread_mutex_unlock(&mMutex);
14571 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014572
14573 // Remove the HDR+ pending request.
14574 {
14575 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14576 auto req = mHdrPlusPendingRequests.find(result->requestId);
14577 mHdrPlusPendingRequests.erase(req);
14578 }
14579 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014580}
14581
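/*===========================================================================
 * FUNCTION   : onFailedCaptureResult
 *
 * DESCRIPTION: Callback invoked when an HDR+ capture request failed. Returns
 *              the YUV buffer to the picture channel and drops the pending
 *              HDR+ request; reporting the failure to the framework is still
 *              a TODO.
 *
 * PARAMETERS :
 *   @failedResult : the failed HDR+ capture result
 *
 * RETURN     : None
 *==========================================================================*/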
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014582void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14583 // TODO: Handle HDR+ capture failures and send the failure to framework.
14584 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14585 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14586
14587 // Return the buffer to pic channel.
14588 QCamera3PicChannel *picChannel =
14589 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14590 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14591
14592 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014593}
14594
Thierry Strudel3d639192016-09-09 11:52:26 -070014595}; //end namespace qcamera