/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold for detection of missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

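// camera3_device_ops dispatch table exposed to the camera framework.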
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

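// Log a named Easel profiling event with a CLOCK_BOOTTIME timestamp in
// milliseconds; this is a no-op unless gEaselProfilingEnabled is set.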
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

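    // Resume Easel, under gHdrPlusClientLock, if it is present on this device.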
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

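    // Tear down the HDR+ client and suspend Easel, all under gHdrPlusClientLock.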
    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configurations requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
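            // Depth-dataspace RAW16 streams (PD stat output) are validated against the
            // PD stat dimensions rather than the regular RAW size table.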
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                // As per spec, the depth point cloud size should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
                 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1357/*===========================================================================
1358 * FUNCTION : validateUsageFlagsForEis
1359 *
1360 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 * NO_ERROR if the usage flags are supported
1367 * error code if usage flags are not supported
1368 *
1369 *==========================================================================*/
1370int QCamera3HardwareInterface::validateUsageFlagsForEis(
1371 const camera3_stream_configuration_t* streamList)
1372{
1373 for (size_t j = 0; j < streamList->num_streams; j++) {
1374 const camera3_stream_t *newStream = streamList->streams[j];
1375
1376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378
1379        // Because EIS is "hard-coded" for certain use cases, and the current
1380        // implementation doesn't support sharing preview and video on the same
1381        // stream, return failure if EIS is forced on.
1382 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1383 LOGE("Combined video and preview usage flag is not supported due to EIS");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
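/*
 * Illustrative sketch (comment only, not compiled), using the same usage-bit
 * assumptions as the sketch above validateUsageFlags(): when EIS is enabled
 * and the configured size is EIS-capable, a single output stream carrying
 * both preview and video usage bits is rejected here.
 *
 *   s.usage = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_TEXTURE;
 *   // validateUsageFlagsForEis(&cfg) -> -EINVAL while m_bEisEnable &&
 *   // m_bEisSupportedSize; with preview and video on distinct streams it
 *   // returns NO_ERROR.
 */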
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*==============================================================================
1391 * FUNCTION : isSupportChannelNeeded
1392 *
1393 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 * @stream_config_info : the config info for streams to be configured
1398 *
1399 * RETURN : Boolean true/false decision
1400 *
1401 *==========================================================================*/
1402bool QCamera3HardwareInterface::isSupportChannelNeeded(
1403 camera3_stream_configuration_t *streamList,
1404 cam_stream_size_info_t stream_config_info)
1405{
1406 uint32_t i;
1407 bool pprocRequested = false;
1408    /* Check for conditions where PProc pipeline does not have any streams */
1409 for (i = 0; i < stream_config_info.num_streams; i++) {
1410 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1411 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1412 pprocRequested = true;
1413 break;
1414 }
1415 }
1416
1417    if (pprocRequested == false)
1418 return true;
1419
1420 /* Dummy stream needed if only raw or jpeg streams present */
1421 for (i = 0; i < streamList->num_streams; i++) {
1422 switch(streamList->streams[i]->format) {
1423 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1424 case HAL_PIXEL_FORMAT_RAW10:
1425 case HAL_PIXEL_FORMAT_RAW16:
1426 case HAL_PIXEL_FORMAT_BLOB:
1427 break;
1428 default:
1429 return false;
1430 }
1431 }
1432 return true;
1433}
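/*
 * Illustrative sketch (comment only, not compiled), with hypothetical sizes:
 *   { BLOB 4000x3000 }                         -> true  (raw/jpeg only, dummy
 *                                                        support stream needed)
 *   { IMPL_DEFINED 1920x1080, BLOB 4000x3000 } -> false (assuming post-
 *                                                        processing is requested
 *                                                        on some stream)
 */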
1434
1435/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001436 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001437 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
1440 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442 *
1443 * RETURN : int32_t type of status
1444 * NO_ERROR -- success
1445 *              non-zero failure code
1446 *
1447 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001448int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001449{
1450 int32_t rc = NO_ERROR;
1451
1452 cam_dimension_t max_dim = {0, 0};
1453 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1454 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1455 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1456 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1457 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1458 }
1459
1460 clear_metadata_buffer(mParameters);
1461
1462 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1463 max_dim);
1464 if (rc != NO_ERROR) {
1465 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1466 return rc;
1467 }
1468
1469 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001477
1478 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1479 mParameters);
1480 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 return rc;
1483 }
1484
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001486 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1487 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1488 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1489 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1490 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001491
1492 return rc;
1493}
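/*
 * Illustrative usage (comment only, not compiled), assuming mStreamConfigInfo
 * has already been populated for the current configuration:
 *
 *   cam_sensor_mode_info_t modeInfo;
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // Fields such as modeInfo.op_pixel_clk and modeInfo.num_raw_bits
 *       // describe the sensor mode selected for the largest stream size.
 *   }
 */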
1494
1495/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 * FUNCTION : addToPPFeatureMask
1497 *
1498 * DESCRIPTION: add additional features to pp feature mask based on
1499 * stream type and usecase
1500 *
1501 * PARAMETERS :
1502 * @stream_format : stream type for feature mask
1503 * @stream_idx : stream idx within postprocess_mask list to change
1504 *
1505 * RETURN     : None
1506 *
1507 *==========================================================================*/
1508void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1509 uint32_t stream_idx)
1510{
1511 char feature_mask_value[PROPERTY_VALUE_MAX];
1512 cam_feature_mask_t feature_mask;
1513 int args_converted;
1514 int property_len;
1515
1516 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001517#ifdef _LE_CAMERA_
1518 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1519 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1520 property_len = property_get("persist.camera.hal3.feature",
1521 feature_mask_value, swtnr_feature_mask_value);
1522#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001523 property_len = property_get("persist.camera.hal3.feature",
1524 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1527 (feature_mask_value[1] == 'x')) {
1528 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1529 } else {
1530 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1531 }
1532 if (1 != args_converted) {
1533 feature_mask = 0;
1534 LOGE("Wrong feature mask %s", feature_mask_value);
1535 return;
1536 }
1537
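    /*
     * Illustrative property values (comment only; the bit values below are
     * hypothetical, the real bits come from the cam_feature_mask_t flags):
     *   adb shell setprop persist.camera.hal3.feature 0x800   <- parsed as hex
     *   adb shell setprop persist.camera.hal3.feature 2048    <- parsed as decimal
     */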
1538 switch (stream_format) {
1539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1540 /* Add LLVD to pp feature mask only if video hint is enabled */
1541 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1542 mStreamConfigInfo.postprocess_mask[stream_idx]
1543 |= CAM_QTI_FEATURE_SW_TNR;
1544 LOGH("Added SW TNR to pp feature mask");
1545 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QCOM_FEATURE_LLVD;
1548 LOGH("Added LLVD SeeMore to pp feature mask");
1549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001550 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1551 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001554 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1557 CAM_QTI_FEATURE_BINNING_CORRECTION;
1558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 break;
1560 }
1561 default:
1562 break;
1563 }
1564 LOGD("PP feature mask %llx",
1565 mStreamConfigInfo.postprocess_mask[stream_idx]);
1566}
1567
1568/*==============================================================================
1569 * FUNCTION : updateFpsInPreviewBuffer
1570 *
1571 * DESCRIPTION: update FPS information in preview buffer.
1572 *
1573 * PARAMETERS :
1574 * @metadata : pointer to metadata buffer
1575 * @frame_number: frame_number to look for in pending buffer list
1576 *
1577 * RETURN : None
1578 *
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1581 uint32_t frame_number)
1582{
1583 // Mark all pending buffers for this particular request
1584 // with corresponding framerate information
1585 for (List<PendingBuffersInRequest>::iterator req =
1586 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1587 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1588 for(List<PendingBufferInfo>::iterator j =
1589 req->mPendingBufferList.begin();
1590 j != req->mPendingBufferList.end(); j++) {
1591 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1592 if ((req->frame_number == frame_number) &&
1593 (channel->getStreamTypeMask() &
1594 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1595 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1596 CAM_INTF_PARM_FPS_RANGE, metadata) {
1597 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1598 struct private_handle_t *priv_handle =
1599 (struct private_handle_t *)(*(j->buffer));
1600 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1601 }
1602 }
1603 }
1604 }
1605}
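/*
 * Illustrative flow (comment only, not compiled): if the metadata of frame N
 * carries CAM_INTF_PARM_FPS_RANGE with max_fps = 30, each pending preview
 * buffer of frame N has its display refresh-rate hint updated as above,
 * roughly:
 *
 *   typeof(MetaData_t::refreshrate) fps = 30;
 *   setMetaData(priv_handle, UPDATE_REFRESH_RATE, &fps);
 */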
1606
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001607/*==============================================================================
1608 * FUNCTION : updateTimeStampInPendingBuffers
1609 *
1610 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1611 * of a frame number
1612 *
1613 * PARAMETERS :
1614 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1615 * @timestamp : timestamp to be set
1616 *
1617 * RETURN : None
1618 *
1619 *==========================================================================*/
1620void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1621 uint32_t frameNumber, nsecs_t timestamp)
1622{
1623 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1624 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1625 if (req->frame_number != frameNumber)
1626 continue;
1627
1628 for (auto k = req->mPendingBufferList.begin();
1629 k != req->mPendingBufferList.end(); k++ ) {
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *) (*(k->buffer));
1632 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1633 }
1634 }
1635 return;
1636}
1637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638/*===========================================================================
1639 * FUNCTION : configureStreams
1640 *
1641 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1642 * and output streams.
1643 *
1644 * PARAMETERS :
1645 * @stream_list : streams to be configured
1646 *
1647 * RETURN :
1648 *
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreams(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001657 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660
1661 return rc;
1662}
1663
1664/*===========================================================================
1665 * FUNCTION : configureStreamsPerfLocked
1666 *
1667 * DESCRIPTION: configureStreams while perfLock is held.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN : int32_t type of status
1673 * NO_ERROR -- success
1674 *              non-zero failure code
1675 *==========================================================================*/
1676int QCamera3HardwareInterface::configureStreamsPerfLocked(
1677 camera3_stream_configuration_t *streamList)
1678{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001679 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001680 int rc = 0;
1681
1682 // Sanity check stream_list
1683 if (streamList == NULL) {
1684 LOGE("NULL stream configuration");
1685 return BAD_VALUE;
1686 }
1687 if (streamList->streams == NULL) {
1688 LOGE("NULL stream list");
1689 return BAD_VALUE;
1690 }
1691
1692 if (streamList->num_streams < 1) {
1693 LOGE("Bad number of streams requested: %d",
1694 streamList->num_streams);
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1699 LOGE("Maximum number of streams %d exceeded: %d",
1700 MAX_NUM_STREAMS, streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001704 rc = validateUsageFlags(streamList);
1705 if (rc != NO_ERROR) {
1706 return rc;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 mOpMode = streamList->operation_mode;
1710 LOGD("mOpMode: %d", mOpMode);
1711
1712    /* first invalidate all the streams in mStreamInfo;
1713     * if they appear again, they will be validated */
1714 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1715 it != mStreamInfo.end(); it++) {
1716 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1717 if (channel) {
1718 channel->stop();
1719 }
1720 (*it)->status = INVALID;
1721 }
1722
1723 if (mRawDumpChannel) {
1724 mRawDumpChannel->stop();
1725 delete mRawDumpChannel;
1726 mRawDumpChannel = NULL;
1727 }
1728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001729 if (mHdrPlusRawSrcChannel) {
1730 mHdrPlusRawSrcChannel->stop();
1731 delete mHdrPlusRawSrcChannel;
1732 mHdrPlusRawSrcChannel = NULL;
1733 }
1734
Thierry Strudel3d639192016-09-09 11:52:26 -07001735 if (mSupportChannel)
1736 mSupportChannel->stop();
1737
1738 if (mAnalysisChannel) {
1739 mAnalysisChannel->stop();
1740 }
1741 if (mMetadataChannel) {
1742        /* If mStreamInfo is not empty, there is a metadata stream */
1743 mMetadataChannel->stop();
1744 }
1745 if (mChannelHandle) {
1746 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1747 mChannelHandle);
1748 LOGD("stopping channel %d", mChannelHandle);
1749 }
1750
1751 pthread_mutex_lock(&mMutex);
1752
1753 // Check state
1754 switch (mState) {
1755 case INITIALIZED:
1756 case CONFIGURED:
1757 case STARTED:
1758 /* valid state */
1759 break;
1760 default:
1761 LOGE("Invalid state %d", mState);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENODEV;
1764 }
1765
1766 /* Check whether we have video stream */
1767 m_bIs4KVideo = false;
1768 m_bIsVideo = false;
1769 m_bEisSupportedSize = false;
1770 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001771 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001772 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001773 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001774 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001775 uint32_t videoWidth = 0U;
1776 uint32_t videoHeight = 0U;
1777 size_t rawStreamCnt = 0;
1778 size_t stallStreamCnt = 0;
1779 size_t processedStreamCnt = 0;
1780 // Number of streams on ISP encoder path
1781 size_t numStreamsOnEncoder = 0;
1782 size_t numYuv888OnEncoder = 0;
1783 bool bYuv888OverrideJpeg = false;
1784 cam_dimension_t largeYuv888Size = {0, 0};
1785 cam_dimension_t maxViewfinderSize = {0, 0};
1786 bool bJpegExceeds4K = false;
1787 bool bJpegOnEncoder = false;
1788 bool bUseCommonFeatureMask = false;
1789 cam_feature_mask_t commonFeatureMask = 0;
1790 bool bSmallJpegSize = false;
1791 uint32_t width_ratio;
1792 uint32_t height_ratio;
1793 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1794 camera3_stream_t *inputStream = NULL;
1795 bool isJpeg = false;
1796 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001797 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001798 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001799
1800 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1801
1802 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 uint8_t eis_prop_set;
1804 uint32_t maxEisWidth = 0;
1805 uint32_t maxEisHeight = 0;
1806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001807 // Initialize all instant AEC related variables
1808 mInstantAEC = false;
1809 mResetInstantAEC = false;
1810 mInstantAECSettledFrameNumber = 0;
1811 mAecSkipDisplayFrameBound = 0;
1812 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 mCurrFeatureState = 0;
1814 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001815
Thierry Strudel3d639192016-09-09 11:52:26 -07001816 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1817
1818 size_t count = IS_TYPE_MAX;
1819 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1820 for (size_t i = 0; i < count; i++) {
1821 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1823 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 break;
1825 }
1826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001829 maxEisWidth = MAX_EIS_WIDTH;
1830 maxEisHeight = MAX_EIS_HEIGHT;
1831 }
1832
1833 /* EIS setprop control */
1834 char eis_prop[PROPERTY_VALUE_MAX];
1835 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001836 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 eis_prop_set = (uint8_t)atoi(eis_prop);
1838
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001839 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1841
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001842 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1843 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001844
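    /*
     * Illustrative control (comment only): EIS can be disabled at runtime via
     * the property read above, e.g.
     *   adb shell setprop persist.camera.eis.enable 0
     * even on targets advertising IS_TYPE_EIS_2_0/IS_TYPE_EIS_3_0; it is
     * always kept off in constrained high-speed (HFR) mode.
     */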
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 /* stream configurations */
1846 for (size_t i = 0; i < streamList->num_streams; i++) {
1847 camera3_stream_t *newStream = streamList->streams[i];
1848 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1849 "height = %d, rotation = %d, usage = 0x%x",
1850 i, newStream->stream_type, newStream->format,
1851 newStream->width, newStream->height, newStream->rotation,
1852 newStream->usage);
1853 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1854 newStream->stream_type == CAMERA3_STREAM_INPUT){
1855 isZsl = true;
1856 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001857 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1858 IS_USAGE_PREVIEW(newStream->usage)) {
1859 isPreview = true;
1860 }
1861
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1863 inputStream = newStream;
1864 }
1865
Emilian Peev7650c122017-01-19 08:24:33 -08001866 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1867 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 isJpeg = true;
1869 jpegSize.width = newStream->width;
1870 jpegSize.height = newStream->height;
1871 if (newStream->width > VIDEO_4K_WIDTH ||
1872 newStream->height > VIDEO_4K_HEIGHT)
1873 bJpegExceeds4K = true;
1874 }
1875
1876 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1877 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1878 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001879            // In HAL3 we can have multiple different video streams.
1880            // The videoWidth and videoHeight variables below hold the
1881            // dimensions of the largest of them.
1882 if (videoWidth < newStream->width ||
1883 videoHeight < newStream->height) {
1884 videoWidth = newStream->width;
1885 videoHeight = newStream->height;
1886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1888 (VIDEO_4K_HEIGHT <= newStream->height)) {
1889 m_bIs4KVideo = true;
1890 }
1891 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1892 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 }
1895 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1896 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1897 switch (newStream->format) {
1898 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001899 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1900 depthPresent = true;
1901 break;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 stallStreamCnt++;
1904 if (isOnEncoder(maxViewfinderSize, newStream->width,
1905 newStream->height)) {
1906 numStreamsOnEncoder++;
1907 bJpegOnEncoder = true;
1908 }
1909 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1910 newStream->width);
1911 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1912                        newStream->height);
1913 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1914 "FATAL: max_downscale_factor cannot be zero and so assert");
1915 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1916 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1917 LOGH("Setting small jpeg size flag to true");
1918 bSmallJpegSize = true;
1919 }
1920 break;
1921 case HAL_PIXEL_FORMAT_RAW10:
1922 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1923 case HAL_PIXEL_FORMAT_RAW16:
1924 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001925 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1926 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1927 pdStatCount++;
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1931 processedStreamCnt++;
1932 if (isOnEncoder(maxViewfinderSize, newStream->width,
1933 newStream->height)) {
1934 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1935 !IS_USAGE_ZSL(newStream->usage)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 }
1938 numStreamsOnEncoder++;
1939 }
1940 break;
1941 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1942 processedStreamCnt++;
1943 if (isOnEncoder(maxViewfinderSize, newStream->width,
1944 newStream->height)) {
1945                    // If Yuv888 size is not greater than 4K, set feature mask
1946                    // to SUPERSET so that it supports concurrent requests on
1947                    // YUV and JPEG.
1948 if (newStream->width <= VIDEO_4K_WIDTH &&
1949 newStream->height <= VIDEO_4K_HEIGHT) {
1950 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1951 }
1952 numStreamsOnEncoder++;
1953 numYuv888OnEncoder++;
1954 largeYuv888Size.width = newStream->width;
1955 largeYuv888Size.height = newStream->height;
1956 }
1957 break;
1958 default:
1959 processedStreamCnt++;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width,
1961 newStream->height)) {
1962 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 }
1967
1968 }
1969 }
1970
1971 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1972 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1973 !m_bIsVideo) {
1974 m_bEisEnable = false;
1975 }
1976
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1978 pthread_mutex_unlock(&mMutex);
1979 return -EINVAL;
1980 }
1981
Thierry Strudel54dc9782017-02-15 12:12:10 -08001982 uint8_t forceEnableTnr = 0;
1983 char tnr_prop[PROPERTY_VALUE_MAX];
1984 memset(tnr_prop, 0, sizeof(tnr_prop));
1985 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1986 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1987
Thierry Strudel3d639192016-09-09 11:52:26 -07001988 /* Logic to enable/disable TNR based on specific config size/etc.*/
1989 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1990 ((videoWidth == 1920 && videoHeight == 1080) ||
1991 (videoWidth == 1280 && videoHeight == 720)) &&
1992 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1993 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001994 else if (forceEnableTnr)
1995 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001996
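    /*
     * Illustrative control (comment only): with the checks above, CPP TNR is
     * enabled only when TNR is configured for preview or video, the video size
     * is 1080p or 720p, and the session is not HFR, unless it is forced on for
     * debugging via
     *   adb shell setprop debug.camera.tnr.forceenable 1
     */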
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001997 char videoHdrProp[PROPERTY_VALUE_MAX];
1998 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1999 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2000 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2001
2002 if (hdr_mode_prop == 1 && m_bIsVideo &&
2003 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2004 m_bVideoHdrEnabled = true;
2005 else
2006 m_bVideoHdrEnabled = false;
2007
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Check if num_streams is sane */
2010 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2011 rawStreamCnt > MAX_RAW_STREAMS ||
2012 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2013        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2014 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2015 pthread_mutex_unlock(&mMutex);
2016 return -EINVAL;
2017 }
2018 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002019 if (isZsl && m_bIs4KVideo) {
2020 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024 /* Check if stream sizes are sane */
2025 if (numStreamsOnEncoder > 2) {
2026 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2027 pthread_mutex_unlock(&mMutex);
2028 return -EINVAL;
2029 } else if (1 < numStreamsOnEncoder){
2030 bUseCommonFeatureMask = true;
2031 LOGH("Multiple streams above max viewfinder size, common mask needed");
2032 }
2033
2034 /* Check if BLOB size is greater than 4k in 4k recording case */
2035 if (m_bIs4KVideo && bJpegExceeds4K) {
2036 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Emilian Peev7650c122017-01-19 08:24:33 -08002041 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2042 depthPresent) {
2043 LOGE("HAL doesn't support depth streams in HFR mode!");
2044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2049 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2050 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2051 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2052 // configurations:
2053 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2054 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2055 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2056 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2057 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2058 __func__);
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062
2063 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2064 // the YUV stream's size is greater or equal to the JPEG size, set common
2065 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2066 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2067 jpegSize.width, jpegSize.height) &&
2068 largeYuv888Size.width > jpegSize.width &&
2069 largeYuv888Size.height > jpegSize.height) {
2070 bYuv888OverrideJpeg = true;
2071 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2072 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2073 }
2074
2075 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2076 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2077 commonFeatureMask);
2078 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2079 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2080
2081 rc = validateStreamDimensions(streamList);
2082 if (rc == NO_ERROR) {
2083 rc = validateStreamRotations(streamList);
2084 }
2085 if (rc != NO_ERROR) {
2086 LOGE("Invalid stream configuration requested!");
2087 pthread_mutex_unlock(&mMutex);
2088 return rc;
2089 }
2090
Emilian Peev0f3c3162017-03-15 12:57:46 +00002091 if (1 < pdStatCount) {
2092 LOGE("HAL doesn't support multiple PD streams");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
2097 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2098 (1 == pdStatCount)) {
2099 LOGE("HAL doesn't support PD streams in HFR mode!");
2100 pthread_mutex_unlock(&mMutex);
2101 return -EINVAL;
2102 }
2103
Thierry Strudel3d639192016-09-09 11:52:26 -07002104 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2105 for (size_t i = 0; i < streamList->num_streams; i++) {
2106 camera3_stream_t *newStream = streamList->streams[i];
2107 LOGH("newStream type = %d, stream format = %d "
2108 "stream size : %d x %d, stream rotation = %d",
2109 newStream->stream_type, newStream->format,
2110 newStream->width, newStream->height, newStream->rotation);
2111        //if the stream is already in mStreamInfo, validate it
2112 bool stream_exists = false;
2113 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2114 it != mStreamInfo.end(); it++) {
2115 if ((*it)->stream == newStream) {
2116 QCamera3ProcessingChannel *channel =
2117 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2118 stream_exists = true;
2119 if (channel)
2120 delete channel;
2121 (*it)->status = VALID;
2122 (*it)->stream->priv = NULL;
2123 (*it)->channel = NULL;
2124 }
2125 }
2126 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2127 //new stream
2128 stream_info_t* stream_info;
2129 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2130 if (!stream_info) {
2131 LOGE("Could not allocate stream info");
2132 rc = -ENOMEM;
2133 pthread_mutex_unlock(&mMutex);
2134 return rc;
2135 }
2136 stream_info->stream = newStream;
2137 stream_info->status = VALID;
2138 stream_info->channel = NULL;
2139 mStreamInfo.push_back(stream_info);
2140 }
2141 /* Covers Opaque ZSL and API1 F/W ZSL */
2142 if (IS_USAGE_ZSL(newStream->usage)
2143 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2144 if (zslStream != NULL) {
2145 LOGE("Multiple input/reprocess streams requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return BAD_VALUE;
2148 }
2149 zslStream = newStream;
2150 }
2151 /* Covers YUV reprocess */
2152 if (inputStream != NULL) {
2153 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2154 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2156 && inputStream->width == newStream->width
2157 && inputStream->height == newStream->height) {
2158 if (zslStream != NULL) {
2159                /* This scenario indicates that multiple YUV streams with the same
2160                 * size as the input stream have been requested. Since the zsl stream
2161                 * handle is used solely to override the size of streams that share
2162                 * h/w streams, we just make a guess here as to which of the streams
2163                 * is the ZSL stream. This will be refactored once we have generic
2164                 * logic for streams sharing encoder output
2165 */
2166 LOGH("Warning, Multiple ip/reprocess streams requested!");
2167 }
2168 zslStream = newStream;
2169 }
2170 }
2171 }
2172
2173 /* If a zsl stream is set, we know that we have configured at least one input or
2174 bidirectional stream */
2175 if (NULL != zslStream) {
2176 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2177 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2178 mInputStreamInfo.format = zslStream->format;
2179 mInputStreamInfo.usage = zslStream->usage;
2180 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2181 mInputStreamInfo.dim.width,
2182 mInputStreamInfo.dim.height,
2183 mInputStreamInfo.format, mInputStreamInfo.usage);
2184 }
2185
2186 cleanAndSortStreamInfo();
2187 if (mMetadataChannel) {
2188 delete mMetadataChannel;
2189 mMetadataChannel = NULL;
2190 }
2191 if (mSupportChannel) {
2192 delete mSupportChannel;
2193 mSupportChannel = NULL;
2194 }
2195
2196 if (mAnalysisChannel) {
2197 delete mAnalysisChannel;
2198 mAnalysisChannel = NULL;
2199 }
2200
2201 if (mDummyBatchChannel) {
2202 delete mDummyBatchChannel;
2203 mDummyBatchChannel = NULL;
2204 }
2205
Emilian Peev7650c122017-01-19 08:24:33 -08002206 if (mDepthChannel) {
2207 mDepthChannel = NULL;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 char is_type_value[PROPERTY_VALUE_MAX];
2211 property_get("persist.camera.is_type", is_type_value, "4");
2212 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2213
Thierry Strudel3d639192016-09-09 11:52:26 -07002214 //Create metadata channel and initialize it
2215 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2216 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2217 gCamCapability[mCameraId]->color_arrangement);
2218 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2219 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002220 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002221 if (mMetadataChannel == NULL) {
2222 LOGE("failed to allocate metadata channel");
2223 rc = -ENOMEM;
2224 pthread_mutex_unlock(&mMutex);
2225 return rc;
2226 }
2227 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2228 if (rc < 0) {
2229 LOGE("metadata channel initialization failed");
2230 delete mMetadataChannel;
2231 mMetadataChannel = NULL;
2232 pthread_mutex_unlock(&mMutex);
2233 return rc;
2234 }
2235
Thierry Strudel2896d122017-02-23 19:18:03 -08002236 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002237 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002238 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002239 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2240 /* Allocate channel objects for the requested streams */
2241 for (size_t i = 0; i < streamList->num_streams; i++) {
2242 camera3_stream_t *newStream = streamList->streams[i];
2243 uint32_t stream_usage = newStream->usage;
2244 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2245 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2246 struct camera_info *p_info = NULL;
2247 pthread_mutex_lock(&gCamLock);
2248 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2249 pthread_mutex_unlock(&gCamLock);
2250 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2251 || IS_USAGE_ZSL(newStream->usage)) &&
2252 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002253 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002255 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2256 if (bUseCommonFeatureMask)
2257 zsl_ppmask = commonFeatureMask;
2258 else
2259 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002261 if (numStreamsOnEncoder > 0)
2262 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2263 else
2264 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002265 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 LOGH("Input stream configured, reprocess config");
2270 } else {
2271 //for non zsl streams find out the format
2272 switch (newStream->format) {
2273 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2274 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002275 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2277 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2278 /* add additional features to pp feature mask */
2279 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2280 mStreamConfigInfo.num_streams);
2281
2282 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2284 CAM_STREAM_TYPE_VIDEO;
2285 if (m_bTnrEnabled && m_bTnrVideo) {
2286 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2287 CAM_QCOM_FEATURE_CPP_TNR;
2288 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2290 ~CAM_QCOM_FEATURE_CDS;
2291 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002292 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2294 CAM_QTI_FEATURE_PPEISCORE;
2295 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 } else {
2297 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2298 CAM_STREAM_TYPE_PREVIEW;
2299 if (m_bTnrEnabled && m_bTnrPreview) {
2300 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2301 CAM_QCOM_FEATURE_CPP_TNR;
2302 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2304 ~CAM_QCOM_FEATURE_CDS;
2305 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002306 if(!m_bSwTnrPreview) {
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2308 ~CAM_QTI_FEATURE_SW_TNR;
2309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002310 padding_info.width_padding = mSurfaceStridePadding;
2311 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002312 previewSize.width = (int32_t)newStream->width;
2313 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 }
2315 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2316 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2317 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2318 newStream->height;
2319 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2320 newStream->width;
2321 }
2322 }
2323 break;
2324 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002325 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002326 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2327 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2328 if (bUseCommonFeatureMask)
2329 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2330 commonFeatureMask;
2331 else
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2333 CAM_QCOM_FEATURE_NONE;
2334 } else {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2337 }
2338 break;
2339 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002340 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002341 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2342 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2343 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2344 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2345 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 /* Remove rotation if it is not supported
2347 for 4K LiveVideo snapshot case (online processing) */
2348 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2349 CAM_QCOM_FEATURE_ROTATION)) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2351 &= ~CAM_QCOM_FEATURE_ROTATION;
2352 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002353 } else {
2354 if (bUseCommonFeatureMask &&
2355 isOnEncoder(maxViewfinderSize, newStream->width,
2356 newStream->height)) {
2357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2358 } else {
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2360 }
2361 }
2362 if (isZsl) {
2363 if (zslStream) {
2364 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2365 (int32_t)zslStream->width;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2367 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2369 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002370 } else {
2371 LOGE("Error, No ZSL stream identified");
2372 pthread_mutex_unlock(&mMutex);
2373 return -EINVAL;
2374 }
2375 } else if (m_bIs4KVideo) {
2376 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2377 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2378 } else if (bYuv888OverrideJpeg) {
2379 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2380 (int32_t)largeYuv888Size.width;
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2382 (int32_t)largeYuv888Size.height;
2383 }
2384 break;
2385 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2386 case HAL_PIXEL_FORMAT_RAW16:
2387 case HAL_PIXEL_FORMAT_RAW10:
2388 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2389 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2390 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002391 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2392 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2393 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2394 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2395 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2396 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2397 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2398 gCamCapability[mCameraId]->dt[mPDIndex];
2399 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2400 gCamCapability[mCameraId]->vc[mPDIndex];
2401 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 break;
2403 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002404 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2407 break;
2408 }
2409 }
2410
2411 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2412 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2413 gCamCapability[mCameraId]->color_arrangement);
2414
2415 if (newStream->priv == NULL) {
2416 //New stream, construct channel
2417 switch (newStream->stream_type) {
2418 case CAMERA3_STREAM_INPUT:
2419 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2420 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2421 break;
2422 case CAMERA3_STREAM_BIDIRECTIONAL:
2423 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2424 GRALLOC_USAGE_HW_CAMERA_WRITE;
2425 break;
2426 case CAMERA3_STREAM_OUTPUT:
2427                /* For video encoding streams, set the read/write-rarely
2428                 * flags so that the buffers may be allocated un-cached */
2429 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2430 newStream->usage |=
2431 (GRALLOC_USAGE_SW_READ_RARELY |
2432 GRALLOC_USAGE_SW_WRITE_RARELY |
2433 GRALLOC_USAGE_HW_CAMERA_WRITE);
2434 else if (IS_USAGE_ZSL(newStream->usage))
2435 {
2436 LOGD("ZSL usage flag skipping");
2437 }
2438 else if (newStream == zslStream
2439 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2440 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2441 } else
2442 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2443 break;
2444 default:
2445 LOGE("Invalid stream_type %d", newStream->stream_type);
2446 break;
2447 }
2448
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002449 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002450 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2451 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2452 QCamera3ProcessingChannel *channel = NULL;
2453 switch (newStream->format) {
2454 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2455 if ((newStream->usage &
2456 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2457 (streamList->operation_mode ==
2458 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2459 ) {
2460 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2461 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002462 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002463 this,
2464 newStream,
2465 (cam_stream_type_t)
2466 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2467 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2468 mMetadataChannel,
2469 0); //heap buffers are not required for HFR video channel
2470 if (channel == NULL) {
2471 LOGE("allocation of channel failed");
2472 pthread_mutex_unlock(&mMutex);
2473 return -ENOMEM;
2474 }
2475 //channel->getNumBuffers() will return 0 here so use
2476                        //MAX_INFLIGHT_HFR_REQUESTS
2477 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2478 newStream->priv = channel;
2479 LOGI("num video buffers in HFR mode: %d",
2480 MAX_INFLIGHT_HFR_REQUESTS);
2481 } else {
2482 /* Copy stream contents in HFR preview only case to create
2483 * dummy batch channel so that sensor streaming is in
2484 * HFR mode */
2485 if (!m_bIsVideo && (streamList->operation_mode ==
2486 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2487 mDummyBatchStream = *newStream;
2488 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002489 int bufferCount = MAX_INFLIGHT_REQUESTS;
2490 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2491 CAM_STREAM_TYPE_VIDEO) {
2492 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2493 bufferCount = MAX_VIDEO_BUFFERS;
2494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2496 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002497 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 this,
2499 newStream,
2500 (cam_stream_type_t)
2501 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2503 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002505 if (channel == NULL) {
2506 LOGE("allocation of channel failed");
2507 pthread_mutex_unlock(&mMutex);
2508 return -ENOMEM;
2509 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002510 /* disable UBWC for preview, though supported,
2511 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002512 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 (previewSize.width == (int32_t)videoWidth)&&
2514 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002515 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002517 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 newStream->max_buffers = channel->getNumBuffers();
2519 newStream->priv = channel;
2520 }
2521 break;
2522 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2523 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2524 mChannelHandle,
2525 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002526 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002527 this,
2528 newStream,
2529 (cam_stream_type_t)
2530 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2531 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2532 mMetadataChannel);
2533 if (channel == NULL) {
2534 LOGE("allocation of YUV channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return -ENOMEM;
2537 }
2538 newStream->max_buffers = channel->getNumBuffers();
2539 newStream->priv = channel;
2540 break;
2541 }
2542 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2543 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002544 case HAL_PIXEL_FORMAT_RAW10: {
2545 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2546 (HAL_DATASPACE_DEPTH != newStream->data_space))
2547 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 mRawChannel = new QCamera3RawChannel(
2549 mCameraHandle->camera_handle, mChannelHandle,
2550 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002551 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002552 this, newStream,
2553 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002554 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 if (mRawChannel == NULL) {
2556 LOGE("allocation of raw channel failed");
2557 pthread_mutex_unlock(&mMutex);
2558 return -ENOMEM;
2559 }
2560 newStream->max_buffers = mRawChannel->getNumBuffers();
2561 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2562 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002565 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2566 mDepthChannel = new QCamera3DepthChannel(
2567 mCameraHandle->camera_handle, mChannelHandle,
2568 mCameraHandle->ops, NULL, NULL, &padding_info,
2569 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2570 mMetadataChannel);
2571 if (NULL == mDepthChannel) {
2572 LOGE("Allocation of depth channel failed");
2573 pthread_mutex_unlock(&mMutex);
2574 return NO_MEMORY;
2575 }
2576 newStream->priv = mDepthChannel;
2577 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2578 } else {
2579 // Max live snapshot inflight buffer is 1. This is to mitigate
2580 // frame drop issues for video snapshot. The more buffers being
2581 // allocated, the more frame drops there are.
2582 mPictureChannel = new QCamera3PicChannel(
2583 mCameraHandle->camera_handle, mChannelHandle,
2584 mCameraHandle->ops, captureResultCb,
2585 setBufferErrorStatus, &padding_info, this, newStream,
2586 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2587 m_bIs4KVideo, isZsl, mMetadataChannel,
2588 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2589 if (mPictureChannel == NULL) {
2590 LOGE("allocation of channel failed");
2591 pthread_mutex_unlock(&mMutex);
2592 return -ENOMEM;
2593 }
2594 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2595 newStream->max_buffers = mPictureChannel->getNumBuffers();
2596 mPictureChannel->overrideYuvSize(
2597 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2598 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002599 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 break;
2601
2602 default:
2603 LOGE("not a supported format 0x%x", newStream->format);
2604 break;
2605 }
2606 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2607 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2608 } else {
2609 LOGE("Error, Unknown stream type");
2610 pthread_mutex_unlock(&mMutex);
2611 return -EINVAL;
2612 }
2613
2614 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002615 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2616 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002617 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2620 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2621 }
2622 }
2623
2624 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2625 it != mStreamInfo.end(); it++) {
2626 if ((*it)->stream == newStream) {
2627 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2628 break;
2629 }
2630 }
2631 } else {
2632 // Channel already exists for this stream
2633 // Do nothing for now
2634 }
2635 padding_info = gCamCapability[mCameraId]->padding_info;
2636
Emilian Peev7650c122017-01-19 08:24:33 -08002637        /* Do not add entries for input & depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002638 * since there is no real stream associated with it
2639 */
Emilian Peev7650c122017-01-19 08:24:33 -08002640 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002641 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2642 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002645 }
2646
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2648 onlyRaw = false;
2649 }
2650
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002651 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002652 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002653 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002654 cam_analysis_info_t analysisInfo;
2655 int32_t ret = NO_ERROR;
2656 ret = mCommon.getAnalysisInfo(
2657 FALSE,
2658 analysisFeatureMask,
2659 &analysisInfo);
2660 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002661 cam_color_filter_arrangement_t analysis_color_arrangement =
2662 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2663 CAM_FILTER_ARRANGEMENT_Y :
2664 gCamCapability[mCameraId]->color_arrangement);
2665 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2666 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002667 cam_dimension_t analysisDim;
2668 analysisDim = mCommon.getMatchingDimension(previewSize,
2669 analysisInfo.analysis_recommended_res);
2670
2671 mAnalysisChannel = new QCamera3SupportChannel(
2672 mCameraHandle->camera_handle,
2673 mChannelHandle,
2674 mCameraHandle->ops,
2675 &analysisInfo.analysis_padding_info,
2676 analysisFeatureMask,
2677 CAM_STREAM_TYPE_ANALYSIS,
2678 &analysisDim,
2679 (analysisInfo.analysis_format
2680 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2681 : CAM_FORMAT_YUV_420_NV21),
2682 analysisInfo.hw_analysis_supported,
2683 gCamCapability[mCameraId]->color_arrangement,
2684 this,
2685 0); // force buffer count to 0
2686 } else {
2687 LOGW("getAnalysisInfo failed, ret = %d", ret);
2688 }
2689 if (!mAnalysisChannel) {
2690 LOGW("Analysis channel cannot be created");
2691 }
2692 }
2693
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 //RAW DUMP channel
2695 if (mEnableRawDump && isRawStreamRequested == false){
2696 cam_dimension_t rawDumpSize;
2697 rawDumpSize = getMaxRawSize(mCameraId);
2698 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2699 setPAAFSupport(rawDumpFeatureMask,
2700 CAM_STREAM_TYPE_RAW,
2701 gCamCapability[mCameraId]->color_arrangement);
2702 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops,
2705 rawDumpSize,
2706 &padding_info,
2707 this, rawDumpFeatureMask);
2708 if (!mRawDumpChannel) {
2709 LOGE("Raw Dump channel cannot be created");
2710 pthread_mutex_unlock(&mMutex);
2711 return -ENOMEM;
2712 }
2713 }
2714
Thierry Strudel3d639192016-09-09 11:52:26 -07002715 if (mAnalysisChannel) {
2716 cam_analysis_info_t analysisInfo;
2717 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2718 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2719 CAM_STREAM_TYPE_ANALYSIS;
2720 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2721 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002722 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002723 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2724 &analysisInfo);
2725 if (rc != NO_ERROR) {
2726 LOGE("getAnalysisInfo failed, ret = %d", rc);
2727 pthread_mutex_unlock(&mMutex);
2728 return rc;
2729 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002730 cam_color_filter_arrangement_t analysis_color_arrangement =
2731 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2732 CAM_FILTER_ARRANGEMENT_Y :
2733 gCamCapability[mCameraId]->color_arrangement);
2734 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2735 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2736 analysis_color_arrangement);
2737
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002739 mCommon.getMatchingDimension(previewSize,
2740 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 mStreamConfigInfo.num_streams++;
2742 }
2743
Thierry Strudel2896d122017-02-23 19:18:03 -08002744 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002745 cam_analysis_info_t supportInfo;
2746 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2747 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2748 setPAAFSupport(callbackFeatureMask,
2749 CAM_STREAM_TYPE_CALLBACK,
2750 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002751 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002752 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002753 if (ret != NO_ERROR) {
2754 /* Ignore the error for Mono camera
2755 * because the PAAF bit mask is only set
2756 * for CAM_STREAM_TYPE_ANALYSIS stream type
2757 */
2758 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2759 LOGW("getAnalysisInfo failed, ret = %d", ret);
2760 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 }
2762 mSupportChannel = new QCamera3SupportChannel(
2763 mCameraHandle->camera_handle,
2764 mChannelHandle,
2765 mCameraHandle->ops,
2766 &gCamCapability[mCameraId]->padding_info,
2767 callbackFeatureMask,
2768 CAM_STREAM_TYPE_CALLBACK,
2769 &QCamera3SupportChannel::kDim,
2770 CAM_FORMAT_YUV_420_NV21,
2771 supportInfo.hw_analysis_supported,
2772 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002773 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002774 if (!mSupportChannel) {
2775 LOGE("dummy channel cannot be created");
2776 pthread_mutex_unlock(&mMutex);
2777 return -ENOMEM;
2778 }
2779 }
2780
2781 if (mSupportChannel) {
2782 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2783 QCamera3SupportChannel::kDim;
2784 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2785 CAM_STREAM_TYPE_CALLBACK;
2786 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2787 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2788 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2790 gCamCapability[mCameraId]->color_arrangement);
2791 mStreamConfigInfo.num_streams++;
2792 }
2793
2794 if (mRawDumpChannel) {
2795 cam_dimension_t rawSize;
2796 rawSize = getMaxRawSize(mCameraId);
2797 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2798 rawSize;
2799 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2800 CAM_STREAM_TYPE_RAW;
2801 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2802 CAM_QCOM_FEATURE_NONE;
2803 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2804 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2805 gCamCapability[mCameraId]->color_arrangement);
2806 mStreamConfigInfo.num_streams++;
2807 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002808
2809 if (mHdrPlusRawSrcChannel) {
2810 cam_dimension_t rawSize;
2811 rawSize = getMaxRawSize(mCameraId);
2812 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2813 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2814 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2815 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2816 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2817 gCamCapability[mCameraId]->color_arrangement);
2818 mStreamConfigInfo.num_streams++;
2819 }
2820
Thierry Strudel3d639192016-09-09 11:52:26 -07002821 /* In HFR mode, if no video stream is added, create a dummy channel so that
2822 * the ISP can use batch mode even for the preview-only case. This channel is
2823 * never 'start'ed (no stream-on), it is only 'initialized' */
2824 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2825 !m_bIsVideo) {
2826 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(dummyFeatureMask,
2828 CAM_STREAM_TYPE_VIDEO,
2829 gCamCapability[mCameraId]->color_arrangement);
2830 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2831 mChannelHandle,
2832 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002833 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002834 this,
2835 &mDummyBatchStream,
2836 CAM_STREAM_TYPE_VIDEO,
2837 dummyFeatureMask,
2838 mMetadataChannel);
2839 if (NULL == mDummyBatchChannel) {
2840 LOGE("creation of mDummyBatchChannel failed. "
2841 "Preview will use non-HFR sensor mode");
2842 }
2843 }
2844 if (mDummyBatchChannel) {
2845 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2846 mDummyBatchStream.width;
2847 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2848 mDummyBatchStream.height;
2849 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2850 CAM_STREAM_TYPE_VIDEO;
2851 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2852 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2853 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2855 gCamCapability[mCameraId]->color_arrangement);
2856 mStreamConfigInfo.num_streams++;
2857 }
2858
2859 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2860 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002861 m_bIs4KVideo ? 0 :
2862 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
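    // The nested ternary above resolves as: 4K video -> 0,
    // else EIS 3.0 enabled -> MAX_VIDEO_BUFFERS,
    // else -> MAX_INFLIGHT_REQUESTS.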
Thierry Strudel3d639192016-09-09 11:52:26 -07002863
2864 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2865 for (pendingRequestIterator i = mPendingRequestsList.begin();
2866 i != mPendingRequestsList.end();) {
2867 i = erasePendingRequest(i);
2868 }
2869 mPendingFrameDropList.clear();
2870 // Initialize/Reset the pending buffers list
2871 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2872 req.mPendingBufferList.clear();
2873 }
2874 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2875
Thierry Strudel3d639192016-09-09 11:52:26 -07002876 mCurJpegMeta.clear();
2877 //Get min frame duration for this streams configuration
2878 deriveMinFrameDuration();
2879
Chien-Yu Chenee335912017-02-09 17:53:20 -08002880 mFirstPreviewIntentSeen = false;
2881
2882 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002883 {
2884 Mutex::Autolock l(gHdrPlusClientLock);
2885 disableHdrPlusModeLocked();
2886 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002887
Thierry Strudel3d639192016-09-09 11:52:26 -07002888 // Update state
2889 mState = CONFIGURED;
2890
2891 pthread_mutex_unlock(&mMutex);
2892
2893 return rc;
2894}
2895
2896/*===========================================================================
2897 * FUNCTION : validateCaptureRequest
2898 *
2899 * DESCRIPTION: validate a capture request from camera service
2900 *
2901 * PARAMETERS :
2902 * @request : request from framework to process
2903 *
2904 * RETURN :
2905 *
2906 *==========================================================================*/
2907int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002908 camera3_capture_request_t *request,
2909 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002910{
2911 ssize_t idx = 0;
2912 const camera3_stream_buffer_t *b;
2913 CameraMetadata meta;
2914
2915 /* Sanity check the request */
2916 if (request == NULL) {
2917 LOGE("NULL capture request");
2918 return BAD_VALUE;
2919 }
2920
2921 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2922 /*settings cannot be null for the first request*/
2923 return BAD_VALUE;
2924 }
2925
2926 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002927 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2928 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 LOGE("Request %d: No output buffers provided!",
2930 frameNumber);
2931 return BAD_VALUE;
2932 }
2933 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2934 LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
2935 request->num_output_buffers, MAX_NUM_STREAMS);
2936 return BAD_VALUE;
2937 }
2938 if (request->input_buffer != NULL) {
2939 b = request->input_buffer;
2940 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2941 LOGE("Request %d: Buffer %ld: Status not OK!",
2942 frameNumber, (long)idx);
2943 return BAD_VALUE;
2944 }
2945 if (b->release_fence != -1) {
2946 LOGE("Request %d: Buffer %ld: Has a release fence!",
2947 frameNumber, (long)idx);
2948 return BAD_VALUE;
2949 }
2950 if (b->buffer == NULL) {
2951 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2952 frameNumber, (long)idx);
2953 return BAD_VALUE;
2954 }
2955 }
2956
2957 // Validate all buffers
2958 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002959 if (b == NULL) {
2960 return BAD_VALUE;
2961 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002962 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002963 QCamera3ProcessingChannel *channel =
2964 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2965 if (channel == NULL) {
2966 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2967 frameNumber, (long)idx);
2968 return BAD_VALUE;
2969 }
2970 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2971 LOGE("Request %d: Buffer %ld: Status not OK!",
2972 frameNumber, (long)idx);
2973 return BAD_VALUE;
2974 }
2975 if (b->release_fence != -1) {
2976 LOGE("Request %d: Buffer %ld: Has a release fence!",
2977 frameNumber, (long)idx);
2978 return BAD_VALUE;
2979 }
2980 if (b->buffer == NULL) {
2981 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2982 frameNumber, (long)idx);
2983 return BAD_VALUE;
2984 }
2985 if (*(b->buffer) == NULL) {
2986 LOGE("Request %d: Buffer %ld: NULL private handle!",
2987 frameNumber, (long)idx);
2988 return BAD_VALUE;
2989 }
2990 idx++;
2991 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002992 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002993 return NO_ERROR;
2994}
2995
2996/*===========================================================================
2997 * FUNCTION : deriveMinFrameDuration
2998 *
2999 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3000 * on currently configured streams.
3001 *
3002 * PARAMETERS : NONE
3003 *
3004 * RETURN : NONE
3005 *
3006 *==========================================================================*/
3007void QCamera3HardwareInterface::deriveMinFrameDuration()
3008{
3009 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3010
3011 maxJpegDim = 0;
3012 maxProcessedDim = 0;
3013 maxRawDim = 0;
3014
3015 // Figure out maximum jpeg, processed, and raw dimensions
3016 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3017 it != mStreamInfo.end(); it++) {
3018
3019 // Input stream doesn't have valid stream_type
3020 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3021 continue;
3022
3023 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3024 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3025 if (dimension > maxJpegDim)
3026 maxJpegDim = dimension;
3027 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3028 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3029 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3030 if (dimension > maxRawDim)
3031 maxRawDim = dimension;
3032 } else {
3033 if (dimension > maxProcessedDim)
3034 maxProcessedDim = dimension;
3035 }
3036 }
3037
3038 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3039 MAX_SIZES_CNT);
3040
3041 //Assume all jpeg dimensions are in processed dimensions.
3042 if (maxJpegDim > maxProcessedDim)
3043 maxProcessedDim = maxJpegDim;
3044 //Find the smallest raw dimension that is greater or equal to jpeg dimension
3045 if (maxProcessedDim > maxRawDim) {
3046 maxRawDim = INT32_MAX;
3047
3048 for (size_t i = 0; i < count; i++) {
3049 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3050 gCamCapability[mCameraId]->raw_dim[i].height;
3051 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3052 maxRawDim = dimension;
3053 }
3054 }
3055
3056 //Find minimum durations for processed, jpeg, and raw
3057 for (size_t i = 0; i < count; i++) {
3058 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3059 gCamCapability[mCameraId]->raw_dim[i].height) {
3060 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3061 break;
3062 }
3063 }
3064 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3065 for (size_t i = 0; i < count; i++) {
3066 if (maxProcessedDim ==
3067 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3068 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3069 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3070 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3071 break;
3072 }
3073 }
3074}
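/* Worked example for deriveMinFrameDuration() above (sizes are illustrative only):
 * with a 1920x1080 preview and a 4032x3024 BLOB stream, and no RAW stream configured,
 * the jpeg area is folded into maxProcessedDim (4032*3024), maxRawDim is raised to
 * the smallest supported raw area >= maxProcessedDim, and the three
 * mMin*FrameDuration values are then looked up from the capability tables for those
 * areas. */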
3075
3076/*===========================================================================
3077 * FUNCTION : getMinFrameDuration
3078 *
3079 * DESCRIPTION: get minimum frame duration based on the currently derived minimum
3080 * frame durations and the current request configuration.
3081 *
3082 * PARAMETERS : @request: request sent by the frameworks
3083 *
3084 * RETURN : min frame duration for a particular request
3085 *
3086 *==========================================================================*/
3087int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3088{
3089 bool hasJpegStream = false;
3090 bool hasRawStream = false;
3091 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3092 const camera3_stream_t *stream = request->output_buffers[i].stream;
3093 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3094 hasJpegStream = true;
3095 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3096 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3097 stream->format == HAL_PIXEL_FORMAT_RAW16)
3098 hasRawStream = true;
3099 }
3100
3101 if (!hasJpegStream)
3102 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3103 else
3104 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3105}
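/* Usage note for getMinFrameDuration() above: a request with only preview/video
 * buffers gets MAX(mMinRawFrameDuration, mMinProcessedFrameDuration); once a BLOB
 * (jpeg) buffer is part of the request, mMinJpegFrameDuration is folded into the MAX
 * as well, so a still capture can only lengthen the minimum frame duration. */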
3106
3107/*===========================================================================
3108 * FUNCTION : handleBuffersDuringFlushLock
3109 *
3110 * DESCRIPTION: Account for buffers returned from back-end during flush
3111 * This function is executed while mMutex is held by the caller.
3112 *
3113 * PARAMETERS :
3114 * @buffer: image buffer for the callback
3115 *
3116 * RETURN :
3117 *==========================================================================*/
3118void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3119{
3120 bool buffer_found = false;
3121 for (List<PendingBuffersInRequest>::iterator req =
3122 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3123 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3124 for (List<PendingBufferInfo>::iterator i =
3125 req->mPendingBufferList.begin();
3126 i != req->mPendingBufferList.end(); i++) {
3127 if (i->buffer == buffer->buffer) {
3128 mPendingBuffersMap.numPendingBufsAtFlush--;
3129 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3130 buffer->buffer, req->frame_number,
3131 mPendingBuffersMap.numPendingBufsAtFlush);
3132 buffer_found = true;
3133 break;
3134 }
3135 }
3136 if (buffer_found) {
3137 break;
3138 }
3139 }
3140 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3141 //signal the flush()
3142 LOGD("All buffers returned to HAL. Continue flush");
3143 pthread_cond_signal(&mBuffersCond);
3144 }
3145}
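/* Note on handleBuffersDuringFlushLock() above: numPendingBufsAtFlush is decremented
 * once per matched buffer; when it reaches zero, every outstanding buffer has been
 * returned by the back-end and the waiting flush() is unblocked via mBuffersCond. */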
3146
Thierry Strudel3d639192016-09-09 11:52:26 -07003147/*===========================================================================
3148 * FUNCTION : handleBatchMetadata
3149 *
3150 * DESCRIPTION: Handles metadata buffer callback in batch mode
3151 *
3152 * PARAMETERS : @metadata_buf: metadata buffer
3153 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3154 * the meta buf in this method
3155 *
3156 * RETURN :
3157 *
3158 *==========================================================================*/
3159void QCamera3HardwareInterface::handleBatchMetadata(
3160 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3161{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003162 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003163
3164 if (NULL == metadata_buf) {
3165 LOGE("metadata_buf is NULL");
3166 return;
3167 }
3168 /* In batch mode, the metadata will contain the frame number and timestamp of
3169 * the last frame in the batch. Eg: a batch containing buffers from requests
3170 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3171 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3172 * multiple process_capture_results */
3173 metadata_buffer_t *metadata =
3174 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3175 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3176 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3177 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3178 uint32_t frame_number = 0, urgent_frame_number = 0;
3179 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3180 bool invalid_metadata = false;
3181 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3182 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003183 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003184
3185 int32_t *p_frame_number_valid =
3186 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3187 uint32_t *p_frame_number =
3188 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3189 int64_t *p_capture_time =
3190 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3191 int32_t *p_urgent_frame_number_valid =
3192 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3193 uint32_t *p_urgent_frame_number =
3194 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3195
3196 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3197 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3198 (NULL == p_urgent_frame_number)) {
3199 LOGE("Invalid metadata");
3200 invalid_metadata = true;
3201 } else {
3202 frame_number_valid = *p_frame_number_valid;
3203 last_frame_number = *p_frame_number;
3204 last_frame_capture_time = *p_capture_time;
3205 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3206 last_urgent_frame_number = *p_urgent_frame_number;
3207 }
3208
3209 /* In batch mode, when no video buffers are requested, set_parms are sent
3210 * for every capture_request. The difference between consecutive urgent
3211 * frame numbers and frame numbers should be used to interpolate the
3212 * corresponding frame numbers and time stamps */
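    /* Worked example (illustrative, assuming a batch of 4 at 120fps HFR): if the
     * batch metadata reports last_frame_number = 8 with capture time T, the loop
     * below emits frame numbers 5, 6, 7, 8 with timestamps T - 3d, T - 2d, T - d, T,
     * where d = NSEC_PER_SEC / mHFRVideoFps, i.e.
     * capture_time = first_frame_capture_time + i * NSEC_PER_SEC / mHFRVideoFps. */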
3213 pthread_mutex_lock(&mMutex);
3214 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003215 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3216 if(idx < 0) {
3217 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3218 last_urgent_frame_number);
3219 mState = ERROR;
3220 pthread_mutex_unlock(&mMutex);
3221 return;
3222 }
3223 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003224 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3225 first_urgent_frame_number;
3226
3227 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3228 urgent_frame_number_valid,
3229 first_urgent_frame_number, last_urgent_frame_number);
3230 }
3231
3232 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003233 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3234 if(idx < 0) {
3235 LOGE("Invalid frame number received: %d. Irrecoverable error",
3236 last_frame_number);
3237 mState = ERROR;
3238 pthread_mutex_unlock(&mMutex);
3239 return;
3240 }
3241 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003242 frameNumDiff = last_frame_number + 1 -
3243 first_frame_number;
3244 mPendingBatchMap.removeItem(last_frame_number);
3245
3246 LOGD("frm: valid: %d frm_num: %d - %d",
3247 frame_number_valid,
3248 first_frame_number, last_frame_number);
3249
3250 }
3251 pthread_mutex_unlock(&mMutex);
3252
3253 if (urgent_frame_number_valid || frame_number_valid) {
3254 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3255 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3256 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3257 urgentFrameNumDiff, last_urgent_frame_number);
3258 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3259 LOGE("frameNumDiff: %d frameNum: %d",
3260 frameNumDiff, last_frame_number);
3261 }
3262
3263 for (size_t i = 0; i < loopCount; i++) {
3264 /* handleMetadataWithLock is called even for invalid_metadata for
3265 * pipeline depth calculation */
3266 if (!invalid_metadata) {
3267 /* Infer frame number. Batch metadata contains frame number of the
3268 * last frame */
3269 if (urgent_frame_number_valid) {
3270 if (i < urgentFrameNumDiff) {
3271 urgent_frame_number =
3272 first_urgent_frame_number + i;
3273 LOGD("inferred urgent frame_number: %d",
3274 urgent_frame_number);
3275 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3276 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3277 } else {
3278 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3279 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3280 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3281 }
3282 }
3283
3284 /* Infer frame number. Batch metadata contains frame number of the
3285 * last frame */
3286 if (frame_number_valid) {
3287 if (i < frameNumDiff) {
3288 frame_number = first_frame_number + i;
3289 LOGD("inferred frame_number: %d", frame_number);
3290 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3291 CAM_INTF_META_FRAME_NUMBER, frame_number);
3292 } else {
3293 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3294 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3295 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3296 }
3297 }
3298
3299 if (last_frame_capture_time) {
3300 //Infer timestamp
3301 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003302 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003303 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003304 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003305 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3306 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3307 LOGD("batch last capture_time: %lld, inferred capture_time: %lld",
3308 last_frame_capture_time, capture_time);
3309 }
3310 }
3311 pthread_mutex_lock(&mMutex);
3312 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003313 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003314 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3315 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003316 &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003317 pthread_mutex_unlock(&mMutex);
3318 }
3319
3320 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003321 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003322 mMetadataChannel->bufDone(metadata_buf);
3323 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003324 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003325 }
3326}
3327
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003328void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3329 camera3_error_msg_code_t errorCode)
3330{
3331 camera3_notify_msg_t notify_msg;
3332 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3333 notify_msg.type = CAMERA3_MSG_ERROR;
3334 notify_msg.message.error.error_code = errorCode;
3335 notify_msg.message.error.error_stream = NULL;
3336 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003337 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003338
3339 return;
3340}
Thierry Strudel3d639192016-09-09 11:52:26 -07003341/*===========================================================================
3342 * FUNCTION : handleMetadataWithLock
3343 *
3344 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3345 *
3346 * PARAMETERS : @metadata_buf: metadata buffer
3347 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3348 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003349 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3350 * last urgent metadata in a batch. Always true for non-batch mode
3351 * @lastMetadataInBatch: Boolean to indicate whether this is the
3352 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3354 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003355 *
3356 * RETURN :
3357 *
3358 *==========================================================================*/
3359void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003360 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003361 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3362 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003363{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003364 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003365 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3366 //during flush do not send metadata from this thread
3367 LOGD("not sending metadata during flush or when mState is error");
3368 if (free_and_bufdone_meta_buf) {
3369 mMetadataChannel->bufDone(metadata_buf);
3370 free(metadata_buf);
3371 }
3372 return;
3373 }
3374
3375 //not in flush
3376 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3377 int32_t frame_number_valid, urgent_frame_number_valid;
3378 uint32_t frame_number, urgent_frame_number;
3379 int64_t capture_time;
3380 nsecs_t currentSysTime;
3381
3382 int32_t *p_frame_number_valid =
3383 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3384 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3385 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3386 int32_t *p_urgent_frame_number_valid =
3387 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3388 uint32_t *p_urgent_frame_number =
3389 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3390 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3391 metadata) {
3392 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3393 *p_frame_number_valid, *p_frame_number);
3394 }
3395
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003396 camera_metadata_t *resultMetadata = nullptr;
3397
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3399 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3400 LOGE("Invalid metadata");
3401 if (free_and_bufdone_meta_buf) {
3402 mMetadataChannel->bufDone(metadata_buf);
3403 free(metadata_buf);
3404 }
3405 goto done_metadata;
3406 }
3407 frame_number_valid = *p_frame_number_valid;
3408 frame_number = *p_frame_number;
3409 capture_time = *p_capture_time;
3410 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3411 urgent_frame_number = *p_urgent_frame_number;
3412 currentSysTime = systemTime(CLOCK_MONOTONIC);
3413
3414 // Detect if buffers from any requests are overdue
3415 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003416 int64_t timeout;
3417 {
3418 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3419 // If there is a pending HDR+ request, the following requests may be blocked until the
3420 // HDR+ request is done. So allow a longer timeout.
3421 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3422 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3423 }
3424
3425 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003427 assert(missed.stream->priv);
3428 if (missed.stream->priv) {
3429 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3430 assert(ch->mStreams[0]);
3431 if (ch->mStreams[0]) {
3432 LOGE("Cancel missing frame = %d, buffer = %p, "
3433 "stream type = %d, stream format = %d",
3434 req.frame_number, missed.buffer,
3435 ch->mStreams[0]->getMyType(), missed.stream->format);
3436 ch->timeoutFrame(req.frame_number);
3437 }
3438 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003439 }
3440 }
3441 }
3442 //Partial result on process_capture_result for timestamp
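    // Flow note: the urgent (3A) metadata sent below counts as an earlier partial
    // result; the complete result metadata assembled later brings the per-frame
    // count up to PARTIAL_RESULT_COUNT as expected by the framework.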
3443 if (urgent_frame_number_valid) {
3444 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3445 urgent_frame_number, capture_time);
3446
3447 //Received an urgent frame number, handle it
3448 //using partial results
3449 for (pendingRequestIterator i =
3450 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3451 LOGD("Iterator Frame = %d urgent frame = %d",
3452 i->frame_number, urgent_frame_number);
3453
3454 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3455 (i->partial_result_cnt == 0)) {
3456 LOGE("Error: HAL missed urgent metadata for frame number %d",
3457 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003458 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 }
3460
3461 if (i->frame_number == urgent_frame_number &&
3462 i->bUrgentReceived == 0) {
3463
3464 camera3_capture_result_t result;
3465 memset(&result, 0, sizeof(camera3_capture_result_t));
3466
3467 i->partial_result_cnt++;
3468 i->bUrgentReceived = 1;
3469 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003470 result.result = translateCbUrgentMetadataToResultMetadata(
3471 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 // Populate metadata result
3473 result.frame_number = urgent_frame_number;
3474 result.num_output_buffers = 0;
3475 result.output_buffers = NULL;
3476 result.partial_result = i->partial_result_cnt;
3477
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003478 {
3479 Mutex::Autolock l(gHdrPlusClientLock);
3480 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3481 // Notify HDR+ client about the partial metadata.
3482 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3483 result.partial_result == PARTIAL_RESULT_COUNT);
3484 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003485 }
3486
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003487 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003488 LOGD("urgent frame_number = %u, capture_time = %lld",
3489 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003490 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3491 // Instant AEC settled for this frame.
3492 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3493 mInstantAECSettledFrameNumber = urgent_frame_number;
3494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 free_camera_metadata((camera_metadata_t *)result.result);
3496 break;
3497 }
3498 }
3499 }
3500
3501 if (!frame_number_valid) {
3502 LOGD("Not a valid normal frame number, used as SOF only");
3503 if (free_and_bufdone_meta_buf) {
3504 mMetadataChannel->bufDone(metadata_buf);
3505 free(metadata_buf);
3506 }
3507 goto done_metadata;
3508 }
3509 LOGH("valid frame_number = %u, capture_time = %lld",
3510 frame_number, capture_time);
3511
Emilian Peev7650c122017-01-19 08:24:33 -08003512 if (metadata->is_depth_data_valid) {
3513 handleDepthDataLocked(metadata->depth_data, frame_number);
3514 }
3515
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003516 // Check whether any stream buffer corresponding to this is dropped or not
3517 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3518 // OR check if instant AEC is enabled, then need to drop frames until AEC is settled.
3519 for (auto & pendingRequest : mPendingRequestsList) {
3520 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3521 mInstantAECSettledFrameNumber)) {
3522 camera3_notify_msg_t notify_msg = {};
3523 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003524 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525 QCamera3ProcessingChannel *channel =
3526 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003527 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003528 if (p_cam_frame_drop) {
3529 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003530 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003531 // Got the stream ID for drop frame.
3532 dropFrame = true;
3533 break;
3534 }
3535 }
3536 } else {
3537 // This is instant AEC case.
3538 // For instant AEC drop the stream untill AEC is settled.
3539 dropFrame = true;
3540 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003541
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003542 if (dropFrame) {
3543 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3544 if (p_cam_frame_drop) {
3545 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003546 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 } else {
3549 // For instant AEC, inform frame drop and frame number
3550 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3551 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003552 pendingRequest.frame_number, streamID,
3553 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 }
3555 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003558 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003559 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003560 if (p_cam_frame_drop) {
3561 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003562 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003564 } else {
3565 // For instant AEC, inform frame drop and frame number
3566 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3567 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003568 pendingRequest.frame_number, streamID,
3569 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003570 }
3571 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003573 PendingFrameDrop.stream_ID = streamID;
3574 // Add the Frame drop info to mPendingFrameDropList
3575 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003576 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 }
3578 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581 for (auto & pendingRequest : mPendingRequestsList) {
3582 // Find the pending request with the frame number.
3583 if (pendingRequest.frame_number == frame_number) {
3584 // Update the sensor timestamp.
3585 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003588 /* Set the timestamp in display metadata so that clients aware of
3589 private_handle, such as VT, can use these unmodified timestamps.
3590 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003592
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 // Find channel requiring metadata, meaning internal offline postprocess
3594 // is needed.
3595 //TODO: for now, we don't support two streams requiring metadata at the same time.
3596 // (because we are not making copies, and the metadata buffer is not reference counted.)
3597 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003598 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3599 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 if (iter->need_metadata) {
3601 internalPproc = true;
3602 QCamera3ProcessingChannel *channel =
3603 (QCamera3ProcessingChannel *)iter->stream->priv;
3604 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003605 if(p_is_metabuf_queued != NULL) {
3606 *p_is_metabuf_queued = true;
3607 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003608 break;
3609 }
3610 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003611 for (auto itr = pendingRequest.internalRequestList.begin();
3612 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003613 if (itr->need_metadata) {
3614 internalPproc = true;
3615 QCamera3ProcessingChannel *channel =
3616 (QCamera3ProcessingChannel *)itr->stream->priv;
3617 channel->queueReprocMetadata(metadata_buf);
3618 break;
3619 }
3620 }
3621
Thierry Strudel54dc9782017-02-15 12:12:10 -08003622 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003623
3624 bool *enableZsl = nullptr;
3625 if (gExposeEnableZslKey) {
3626 enableZsl = &pendingRequest.enableZsl;
3627 }
3628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 resultMetadata = translateFromHalMetadata(metadata,
3630 pendingRequest.timestamp, pendingRequest.request_id,
3631 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3632 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003633 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003634 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003635 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003636 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003638 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003639
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003640 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003641
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 if (pendingRequest.blob_request) {
3643 //Dump tuning metadata if enabled and available
3644 char prop[PROPERTY_VALUE_MAX];
3645 memset(prop, 0, sizeof(prop));
3646 property_get("persist.camera.dumpmetadata", prop, "0");
3647 int32_t enabled = atoi(prop);
3648 if (enabled && metadata->is_tuning_params_valid) {
3649 dumpMetadataToFile(metadata->tuning_params,
3650 mMetaFrameCount,
3651 enabled,
3652 "Snapshot",
3653 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003654 }
3655 }
3656
3657 if (!internalPproc) {
3658 LOGD("couldn't find need_metadata for this metadata");
3659 // Return metadata buffer
3660 if (free_and_bufdone_meta_buf) {
3661 mMetadataChannel->bufDone(metadata_buf);
3662 free(metadata_buf);
3663 }
3664 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003667 }
3668 }
3669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003670 // Try to send out shutter callbacks and capture results.
3671 handlePendingResultsWithLock(frame_number, resultMetadata);
3672 return;
3673
Thierry Strudel3d639192016-09-09 11:52:26 -07003674done_metadata:
3675 for (pendingRequestIterator i = mPendingRequestsList.begin();
3676 i != mPendingRequestsList.end() ;i++) {
3677 i->pipeline_depth++;
3678 }
3679 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3680 unblockRequestIfNecessary();
3681}
3682
3683/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003684 * FUNCTION : handleDepthDataLocked
3685 *
3686 * DESCRIPTION: Handles incoming depth data
3687 *
3688 * PARAMETERS : @depthData : Depth data
3689 * @frameNumber: Frame number of the incoming depth data
3690 *
3691 * RETURN :
3692 *
3693 *==========================================================================*/
3694void QCamera3HardwareInterface::handleDepthDataLocked(
3695 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3696 uint32_t currentFrameNumber;
3697 buffer_handle_t *depthBuffer;
3698
3699 if (nullptr == mDepthChannel) {
3700 LOGE("Depth channel not present!");
3701 return;
3702 }
3703
3704 camera3_stream_buffer_t resultBuffer =
3705 {.acquire_fence = -1,
3706 .release_fence = -1,
3707 .status = CAMERA3_BUFFER_STATUS_OK,
3708 .buffer = nullptr,
3709 .stream = mDepthChannel->getStream()};
3710 camera3_capture_result_t result =
3711 {.result = nullptr,
3712 .num_output_buffers = 1,
3713 .output_buffers = &resultBuffer,
3714 .partial_result = 0,
3715 .frame_number = 0};
3716
3717 do {
3718 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3719 if (nullptr == depthBuffer) {
3720 break;
3721 }
3722
3723 result.frame_number = currentFrameNumber;
3724 resultBuffer.buffer = depthBuffer;
3725 if (currentFrameNumber == frameNumber) {
3726 int32_t rc = mDepthChannel->populateDepthData(depthData,
3727 frameNumber);
3728 if (NO_ERROR != rc) {
3729 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3730 } else {
3731 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3732 }
3733 } else if (currentFrameNumber > frameNumber) {
3734 break;
3735 } else {
3736 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3737 {{currentFrameNumber, mDepthChannel->getStream(),
3738 CAMERA3_MSG_ERROR_BUFFER}}};
3739 orchestrateNotify(&notify_msg);
3740
3741 LOGE("Depth buffer for frame number: %d is missing, "
3742 "returning it with error status!", currentFrameNumber);
3743 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3744 }
3745 mDepthChannel->unmapBuffer(currentFrameNumber);
3746
3747 orchestrateResult(&result);
3748 } while (currentFrameNumber < frameNumber);
3749}
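/* Draining behaviour of handleDepthDataLocked() above: pending depth buffers are
 * walked oldest-first; buffers older than the incoming frame are returned with an
 * error notify, the matching buffer is populated with the depth payload, and any
 * newer buffers are left queued for a later callback. */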
3750
3751/*===========================================================================
3752 * FUNCTION : notifyErrorFoPendingDepthData
3753 *
3754 * DESCRIPTION: Returns error for any pending depth buffers
3755 *
3756 * PARAMETERS : depthCh - depth channel that needs to get flushed
3757 *
3758 * RETURN :
3759 *
3760 *==========================================================================*/
3761void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3762 QCamera3DepthChannel *depthCh) {
3763 uint32_t currentFrameNumber;
3764 buffer_handle_t *depthBuffer;
3765
3766 if (nullptr == depthCh) {
3767 return;
3768 }
3769
3770 camera3_notify_msg_t notify_msg =
3771 {.type = CAMERA3_MSG_ERROR,
3772 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3773 camera3_stream_buffer_t resultBuffer =
3774 {.acquire_fence = -1,
3775 .release_fence = -1,
3776 .buffer = nullptr,
3777 .stream = depthCh->getStream(),
3778 .status = CAMERA3_BUFFER_STATUS_ERROR};
3779 camera3_capture_result_t result =
3780 {.result = nullptr,
3781 .frame_number = 0,
3782 .num_output_buffers = 1,
3783 .partial_result = 0,
3784 .output_buffers = &resultBuffer};
3785
3786 while (nullptr !=
3787 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3788 depthCh->unmapBuffer(currentFrameNumber);
3789
3790 notify_msg.message.error.frame_number = currentFrameNumber;
3791 orchestrateNotify(&notify_msg);
3792
3793 resultBuffer.buffer = depthBuffer;
3794 result.frame_number = currentFrameNumber;
3795 orchestrateResult(&result);
3796 };
3797}
3798
3799/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003800 * FUNCTION : hdrPlusPerfLock
3801 *
3802 * DESCRIPTION: perf lock for HDR+ using custom intent
3803 *
3804 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3805 *
3806 * RETURN : None
3807 *
3808 *==========================================================================*/
3809void QCamera3HardwareInterface::hdrPlusPerfLock(
3810 mm_camera_super_buf_t *metadata_buf)
3811{
3812 if (NULL == metadata_buf) {
3813 LOGE("metadata_buf is NULL");
3814 return;
3815 }
3816 metadata_buffer_t *metadata =
3817 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3818 int32_t *p_frame_number_valid =
3819 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3820 uint32_t *p_frame_number =
3821 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3822
3823 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3824 LOGE("%s: Invalid metadata", __func__);
3825 return;
3826 }
3827
3828 //acquire perf lock for 5 sec after the last HDR frame is captured
3829 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3830 if ((p_frame_number != NULL) &&
3831 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003832 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835}
3836
3837/*===========================================================================
3838 * FUNCTION : handleInputBufferWithLock
3839 *
3840 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3841 *
3842 * PARAMETERS : @frame_number: frame number of the input buffer
3843 *
3844 * RETURN :
3845 *
3846 *==========================================================================*/
3847void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3848{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003849 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003850 pendingRequestIterator i = mPendingRequestsList.begin();
3851 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3852 i++;
3853 }
3854 if (i != mPendingRequestsList.end() && i->input_buffer) {
3855 //found the right request
3856 if (!i->shutter_notified) {
3857 CameraMetadata settings;
3858 camera3_notify_msg_t notify_msg;
3859 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3860 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3861 if(i->settings) {
3862 settings = i->settings;
3863 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3864 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3865 } else {
3866 LOGE("No timestamp in input settings! Using current one.");
3867 }
3868 } else {
3869 LOGE("Input settings missing!");
3870 }
3871
3872 notify_msg.type = CAMERA3_MSG_SHUTTER;
3873 notify_msg.message.shutter.frame_number = frame_number;
3874 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003876 i->shutter_notified = true;
3877 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3878 i->frame_number, notify_msg.message.shutter.timestamp);
3879 }
3880
3881 if (i->input_buffer->release_fence != -1) {
3882 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3883 close(i->input_buffer->release_fence);
3884 if (rc != OK) {
3885 LOGE("input buffer sync wait failed %d", rc);
3886 }
3887 }
3888
3889 camera3_capture_result result;
3890 memset(&result, 0, sizeof(camera3_capture_result));
3891 result.frame_number = frame_number;
3892 result.result = i->settings;
3893 result.input_buffer = i->input_buffer;
3894 result.partial_result = PARTIAL_RESULT_COUNT;
3895
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003896 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003897 LOGD("Input request metadata and input buffer frame_number = %u",
3898 i->frame_number);
3899 i = erasePendingRequest(i);
3900 } else {
3901 LOGE("Could not find input request for frame number %d", frame_number);
3902 }
3903}
3904
3905/*===========================================================================
3906 * FUNCTION : handleBufferWithLock
3907 *
3908 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3909 *
3910 * PARAMETERS : @buffer: image buffer for the callback
3911 * @frame_number: frame number of the image buffer
3912 *
3913 * RETURN :
3914 *
3915 *==========================================================================*/
3916void QCamera3HardwareInterface::handleBufferWithLock(
3917 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3918{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003919 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003920
3921 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3922 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3923 }
3924
Thierry Strudel3d639192016-09-09 11:52:26 -07003925 /* Nothing to be done during error state */
3926 if ((ERROR == mState) || (DEINIT == mState)) {
3927 return;
3928 }
3929 if (mFlushPerf) {
3930 handleBuffersDuringFlushLock(buffer);
3931 return;
3932 }
3933 //not in flush
3934 // If the frame number doesn't exist in the pending request list,
3935 // directly send the buffer to the frameworks, and update pending buffers map
3936 // Otherwise, book-keep the buffer.
3937 pendingRequestIterator i = mPendingRequestsList.begin();
3938 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3939 i++;
3940 }
3941 if (i == mPendingRequestsList.end()) {
3942 // Verify all pending requests frame_numbers are greater
3943 for (pendingRequestIterator j = mPendingRequestsList.begin();
3944 j != mPendingRequestsList.end(); j++) {
3945 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3946 LOGW("Error: pending live frame number %d is smaller than %d",
3947 j->frame_number, frame_number);
3948 }
3949 }
3950 camera3_capture_result_t result;
3951 memset(&result, 0, sizeof(camera3_capture_result_t));
3952 result.result = NULL;
3953 result.frame_number = frame_number;
3954 result.num_output_buffers = 1;
3955 result.partial_result = 0;
3956 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3957 m != mPendingFrameDropList.end(); m++) {
3958 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3959 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3960 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3961 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3962 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3963 frame_number, streamID);
3964 m = mPendingFrameDropList.erase(m);
3965 break;
3966 }
3967 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003968 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003969 result.output_buffers = buffer;
3970 LOGH("result frame_number = %d, buffer = %p",
3971 frame_number, buffer->buffer);
3972
3973 mPendingBuffersMap.removeBuf(buffer->buffer);
3974
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003975 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003976 } else {
3977 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003978 if (i->input_buffer->release_fence != -1) {
3979 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3980 close(i->input_buffer->release_fence);
3981 if (rc != OK) {
3982 LOGE("input buffer sync wait failed %d", rc);
3983 }
3984 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003985 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003986
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003987 // Put buffer into the pending request
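            // Note: the cached copy is heap-allocated here and released later in
            // handlePendingResultsWithLock, after it has been copied into the
            // outgoing capture result's output buffer array.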
3988 for (auto &requestedBuffer : i->buffers) {
3989 if (requestedBuffer.stream == buffer->stream) {
3990 if (requestedBuffer.buffer != nullptr) {
3991 LOGE("Error: buffer is already set");
3992 } else {
3993 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3994 sizeof(camera3_stream_buffer_t));
3995 *(requestedBuffer.buffer) = *buffer;
3996 LOGH("cache buffer %p at result frame_number %u",
3997 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003998 }
3999 }
4000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004001
4002 if (i->input_buffer) {
4003 // For a reprocessing request, try to send out shutter callback and result metadata.
4004 handlePendingResultsWithLock(frame_number, nullptr);
4005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004006 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004007
4008 if (mPreviewStarted == false) {
4009 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4010 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004011 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4012
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004013 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4014 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4015 mPreviewStarted = true;
4016
4017 // Set power hint for preview
4018 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4019 }
4020 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004021}
4022
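/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Updates the pending request matching frameNumber with the given
 *              result metadata, then walks the pending request list in order,
 *              sending out the shutter callbacks and capture results that are
 *              ready. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber   : frame number of the result metadata
 *              @resultMetadata: result metadata to attach to the pending request
 *
 * RETURN     :
 *
 *==========================================================================*/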
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004023void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4024 const camera_metadata_t *resultMetadata)
4025{
4026 // Find the pending request for this result metadata.
4027 auto requestIter = mPendingRequestsList.begin();
4028 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4029 requestIter++;
4030 }
4031
4032 if (requestIter == mPendingRequestsList.end()) {
4033 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4034 return;
4035 }
4036
4037 // Update the result metadata
4038 requestIter->resultMetadata = resultMetadata;
4039
4040 // Check what type of request this is.
4041 bool liveRequest = false;
4042 if (requestIter->hdrplus) {
4043 // HDR+ request doesn't have partial results.
4044 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4045 } else if (requestIter->input_buffer != nullptr) {
4046 // Reprocessing request result is the same as settings.
4047 requestIter->resultMetadata = requestIter->settings;
4048 // Reprocessing request doesn't have partial results.
4049 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4050 } else {
4051 liveRequest = true;
4052 requestIter->partial_result_cnt++;
4053 mPendingLiveRequest--;
4054
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004055 {
4056 Mutex::Autolock l(gHdrPlusClientLock);
4057 // For a live request, send the metadata to HDR+ client.
4058 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4059 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4060 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4061 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004062 }
4063 }
4064
4065 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4066 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4067 bool readyToSend = true;
4068
4069 // Iterate through the pending requests to send out shutter callbacks and results that are
4070 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4071 // live requests that don't have result metadata yet.
4072 auto iter = mPendingRequestsList.begin();
4073 while (iter != mPendingRequestsList.end()) {
4074 // Check if current pending request is ready. If it's not ready, the following pending
4075 // requests are also not ready.
4076 if (readyToSend && iter->resultMetadata == nullptr) {
4077 readyToSend = false;
4078 }
4079
4080 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4081
4082 std::vector<camera3_stream_buffer_t> outputBuffers;
4083
4084 camera3_capture_result_t result = {};
4085 result.frame_number = iter->frame_number;
4086 result.result = iter->resultMetadata;
4087 result.partial_result = iter->partial_result_cnt;
4088
4089        // If this pending request has result metadata, we may be able to send out the shutter callback
4090 // and result metadata.
4091 if (iter->resultMetadata != nullptr) {
4092 if (!readyToSend) {
4093 // If any of the previous pending request is not ready, this pending request is
4094 // also not ready to send in order to keep shutter callbacks and result metadata
4095 // in order.
4096 iter++;
4097 continue;
4098 }
4099
4100 // Invoke shutter callback if not yet.
4101 if (!iter->shutter_notified) {
4102 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4103
4104                // Find the sensor timestamp in the result metadata
4105 camera_metadata_ro_entry_t entry;
4106 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4107 ANDROID_SENSOR_TIMESTAMP, &entry);
4108 if (res != OK) {
4109 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4110 __FUNCTION__, iter->frame_number, strerror(-res), res);
4111 } else {
4112 timestamp = entry.data.i64[0];
4113 }
4114
4115 camera3_notify_msg_t notify_msg = {};
4116 notify_msg.type = CAMERA3_MSG_SHUTTER;
4117 notify_msg.message.shutter.frame_number = iter->frame_number;
4118 notify_msg.message.shutter.timestamp = timestamp;
4119 orchestrateNotify(&notify_msg);
4120 iter->shutter_notified = true;
4121 }
4122
4123 result.input_buffer = iter->input_buffer;
4124
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004125 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4126 // If the result metadata belongs to a live request, notify errors for previous pending
4127 // live requests.
4128 mPendingLiveRequest--;
4129
4130 CameraMetadata dummyMetadata;
4131 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4132 result.result = dummyMetadata.release();
4133
4134 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004135
4136            // partial_result should be PARTIAL_RESULT_COUNT in case of
4137 // ERROR_RESULT.
4138 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4139 result.partial_result = PARTIAL_RESULT_COUNT;
4140
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004141 } else {
4142 iter++;
4143 continue;
4144 }
4145
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004146 // Prepare output buffer array
4147 for (auto bufferInfoIter = iter->buffers.begin();
4148 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4149 if (bufferInfoIter->buffer != nullptr) {
4150
4151 QCamera3Channel *channel =
4152 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4153 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4154
4155 // Check if this buffer is a dropped frame.
4156 auto frameDropIter = mPendingFrameDropList.begin();
4157 while (frameDropIter != mPendingFrameDropList.end()) {
4158 if((frameDropIter->stream_ID == streamID) &&
4159 (frameDropIter->frame_number == frameNumber)) {
4160 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4161 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4162 streamID);
4163 mPendingFrameDropList.erase(frameDropIter);
4164 break;
4165 } else {
4166 frameDropIter++;
4167 }
4168 }
4169
4170 // Check buffer error status
4171 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4172 bufferInfoIter->buffer->buffer);
4173 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4174
4175 outputBuffers.push_back(*(bufferInfoIter->buffer));
4176 free(bufferInfoIter->buffer);
4177 bufferInfoIter->buffer = NULL;
4178 }
4179 }
4180
4181 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4182 result.num_output_buffers = outputBuffers.size();
4183
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004184 orchestrateResult(&result);
4185
4186 // For reprocessing, result metadata is the same as settings so do not free it here to
4187 // avoid double free.
4188 if (result.result != iter->settings) {
4189 free_camera_metadata((camera_metadata_t *)result.result);
4190 }
4191 iter->resultMetadata = nullptr;
4192 iter = erasePendingRequest(iter);
4193 }
4194
4195 if (liveRequest) {
4196 for (auto &iter : mPendingRequestsList) {
4197 // Increment pipeline depth for the following pending requests.
4198 if (iter.frame_number > frameNumber) {
4199 iter.pipeline_depth++;
4200 }
4201 }
4202 }
4203
4204 unblockRequestIfNecessary();
4205}
4206
Thierry Strudel3d639192016-09-09 11:52:26 -07004207/*===========================================================================
4208 * FUNCTION : unblockRequestIfNecessary
4209 *
4210 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4211 * that mMutex is held when this function is called.
4212 *
4213 * PARAMETERS :
4214 *
4215 * RETURN :
4216 *
4217 *==========================================================================*/
4218void QCamera3HardwareInterface::unblockRequestIfNecessary()
4219{
4220 // Unblock process_capture_request
4221 pthread_cond_signal(&mRequestCond);
4222}
4223
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004224/*===========================================================================
4225 * FUNCTION : isHdrSnapshotRequest
4226 *
4227 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4228 *
4229 * PARAMETERS : camera3 request structure
4230 *
4231 * RETURN : boolean decision variable
4232 *
4233 *==========================================================================*/
4234bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4235{
4236 if (request == NULL) {
4237 LOGE("Invalid request handle");
4238 assert(0);
4239 return false;
4240 }
4241
4242 if (!mForceHdrSnapshot) {
4243 CameraMetadata frame_settings;
4244 frame_settings = request->settings;
4245
4246 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4247 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4248 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4249 return false;
4250 }
4251 } else {
4252 return false;
4253 }
4254
4255 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4256 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4257 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4258 return false;
4259 }
4260 } else {
4261 return false;
4262 }
4263 }
4264
4265 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4266 if (request->output_buffers[i].stream->format
4267 == HAL_PIXEL_FORMAT_BLOB) {
4268 return true;
4269 }
4270 }
4271
4272 return false;
4273}
4274/*===========================================================================
4275 * FUNCTION : orchestrateRequest
4276 *
4277 * DESCRIPTION: Orchestrates a capture request from camera service
4278 *
4279 * PARAMETERS :
4280 * @request : request from framework to process
4281 *
4282 * RETURN : Error status codes
4283 *
4284 *==========================================================================*/
4285int32_t QCamera3HardwareInterface::orchestrateRequest(
4286 camera3_capture_request_t *request)
4287{
4288
4289 uint32_t originalFrameNumber = request->frame_number;
4290 uint32_t originalOutputCount = request->num_output_buffers;
4291 const camera_metadata_t *original_settings = request->settings;
4292 List<InternalRequest> internallyRequestedStreams;
4293 List<InternalRequest> emptyInternalList;
4294
4295 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4296 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4297 uint32_t internalFrameNumber;
4298 CameraMetadata modified_meta;
4299
4300
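        /* Summary of the sequence below: the single framework request is expanded
         * into an exposure bracket driven with internal frame numbers. For each
         * exposure step, AE is kept locked, the exposure compensation is updated,
         * a metering-only capture is issued first so AE can settle, and the actual
         * capture follows. Only the capture tied to the original framework frame
         * number is reported back; results for internally generated frame numbers
         * are dropped in orchestrateResult/orchestrateNotify. */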
4301 /* Add Blob channel to list of internally requested streams */
4302 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4303 if (request->output_buffers[i].stream->format
4304 == HAL_PIXEL_FORMAT_BLOB) {
4305 InternalRequest streamRequested;
4306 streamRequested.meteringOnly = 1;
4307 streamRequested.need_metadata = 0;
4308 streamRequested.stream = request->output_buffers[i].stream;
4309 internallyRequestedStreams.push_back(streamRequested);
4310 }
4311 }
4312 request->num_output_buffers = 0;
4313 auto itr = internallyRequestedStreams.begin();
4314
4315 /* Modify setting to set compensation */
4316 modified_meta = request->settings;
4317 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4318 uint8_t aeLock = 1;
4319 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4320 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4321 camera_metadata_t *modified_settings = modified_meta.release();
4322 request->settings = modified_settings;
4323
4324 /* Capture Settling & -2x frame */
4325 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4326 request->frame_number = internalFrameNumber;
4327 processCaptureRequest(request, internallyRequestedStreams);
4328
4329 request->num_output_buffers = originalOutputCount;
4330 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4331 request->frame_number = internalFrameNumber;
4332 processCaptureRequest(request, emptyInternalList);
4333 request->num_output_buffers = 0;
4334
4335 modified_meta = modified_settings;
4336 expCompensation = 0;
4337 aeLock = 1;
4338 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4339 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4340 modified_settings = modified_meta.release();
4341 request->settings = modified_settings;
4342
4343 /* Capture Settling & 0X frame */
4344
4345 itr = internallyRequestedStreams.begin();
4346 if (itr == internallyRequestedStreams.end()) {
4347 LOGE("Error Internally Requested Stream list is empty");
4348 assert(0);
4349 } else {
4350 itr->need_metadata = 0;
4351 itr->meteringOnly = 1;
4352 }
4353
4354 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4355 request->frame_number = internalFrameNumber;
4356 processCaptureRequest(request, internallyRequestedStreams);
4357
4358 itr = internallyRequestedStreams.begin();
4359 if (itr == internallyRequestedStreams.end()) {
4360 ALOGE("Error Internally Requested Stream list is empty");
4361 assert(0);
4362 } else {
4363 itr->need_metadata = 1;
4364 itr->meteringOnly = 0;
4365 }
4366
4367 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4368 request->frame_number = internalFrameNumber;
4369 processCaptureRequest(request, internallyRequestedStreams);
4370
4371 /* Capture 2X frame*/
4372 modified_meta = modified_settings;
4373 expCompensation = GB_HDR_2X_STEP_EV;
4374 aeLock = 1;
4375 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4376 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4377 modified_settings = modified_meta.release();
4378 request->settings = modified_settings;
4379
4380 itr = internallyRequestedStreams.begin();
4381 if (itr == internallyRequestedStreams.end()) {
4382 ALOGE("Error Internally Requested Stream list is empty");
4383 assert(0);
4384 } else {
4385 itr->need_metadata = 0;
4386 itr->meteringOnly = 1;
4387 }
4388 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4389 request->frame_number = internalFrameNumber;
4390 processCaptureRequest(request, internallyRequestedStreams);
4391
4392 itr = internallyRequestedStreams.begin();
4393 if (itr == internallyRequestedStreams.end()) {
4394 ALOGE("Error Internally Requested Stream list is empty");
4395 assert(0);
4396 } else {
4397 itr->need_metadata = 1;
4398 itr->meteringOnly = 0;
4399 }
4400
4401 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4402 request->frame_number = internalFrameNumber;
4403 processCaptureRequest(request, internallyRequestedStreams);
4404
4405
4406 /* Capture 2X on original streaming config*/
4407 internallyRequestedStreams.clear();
4408
4409 /* Restore original settings pointer */
4410 request->settings = original_settings;
4411 } else {
4412 uint32_t internalFrameNumber;
4413 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4414 request->frame_number = internalFrameNumber;
4415 return processCaptureRequest(request, internallyRequestedStreams);
4416 }
4417
4418 return NO_ERROR;
4419}
4420
4421/*===========================================================================
4422 * FUNCTION : orchestrateResult
4423 *
4424 * DESCRIPTION: Orchestrates a capture result to camera service
4425 *
4426 * PARAMETERS :
4427 *   @result : capture result to be sent to the framework
4428 *
4429 * RETURN :
4430 *
4431 *==========================================================================*/
4432void QCamera3HardwareInterface::orchestrateResult(
4433 camera3_capture_result_t *result)
4434{
4435 uint32_t frameworkFrameNumber;
4436 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4437 frameworkFrameNumber);
4438 if (rc != NO_ERROR) {
4439 LOGE("Cannot find translated frameworkFrameNumber");
4440 assert(0);
4441 } else {
4442 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004443 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004445 if (result->result != NULL) {
4446 CameraMetadata metadata;
4447 metadata.acquire((camera_metadata_t *)result->result);
4448 if (metadata.exists(ANDROID_SYNC_FRAME_NUMBER)) {
4449 int64_t sync_frame_number = frameworkFrameNumber;
4450 metadata.update(ANDROID_SYNC_FRAME_NUMBER, &sync_frame_number, 1);
4451 }
4452 result->result = metadata.release();
4453 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004454 result->frame_number = frameworkFrameNumber;
4455 mCallbackOps->process_capture_result(mCallbackOps, result);
4456 }
4457 }
4458}
4459
4460/*===========================================================================
4461 * FUNCTION : orchestrateNotify
4462 *
4463 * DESCRIPTION: Orchestrates a notify to camera service
4464 *
4465 * PARAMETERS :
4466 *   @notify_msg : notify message to be sent to the framework
4467 *
4468 * RETURN :
4469 *
4470 *==========================================================================*/
4471void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4472{
4473 uint32_t frameworkFrameNumber;
4474 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004475 int32_t rc = NO_ERROR;
4476
4477 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004478 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004479
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004480 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004481 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4482 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4483 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004484 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004485 LOGE("Cannot find translated frameworkFrameNumber");
4486 assert(0);
4487 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004488 }
4489 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004490
4491 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4492 LOGD("Internal Request drop the notifyCb");
4493 } else {
4494 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4495 mCallbackOps->notify(mCallbackOps, notify_msg);
4496 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004497}
4498
4499/*===========================================================================
4500 * FUNCTION : FrameNumberRegistry
4501 *
4502 * DESCRIPTION: Constructor
4503 *
4504 * PARAMETERS :
4505 *
4506 * RETURN :
4507 *
4508 *==========================================================================*/
4509FrameNumberRegistry::FrameNumberRegistry()
4510{
4511 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4512}
4513
4514/*===========================================================================
4515 * FUNCTION : ~FrameNumberRegistry
4516 *
4517 * DESCRIPTION: Destructor
4518 *
4519 * PARAMETERS :
4520 *
4521 * RETURN :
4522 *
4523 *==========================================================================*/
4524FrameNumberRegistry::~FrameNumberRegistry()
4525{
4526}
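// Typical usage: orchestrateRequest stores a framework-to-internal mapping via
// allocStoreInternalFrameNumber (or generateStoreInternalFrameNumber for purely
// internal captures), and orchestrateResult/orchestrateNotify translate the
// internal number back with getFrameworkFrameNumber. Entries older than
// FRAME_REGISTER_LRU_SIZE are purged on each of these operations.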
4527
4528/*===========================================================================
4529 * FUNCTION : PurgeOldEntriesLocked
4530 *
4531 * DESCRIPTION: Maintainance function to trigger LRU cleanup mechanism
4532 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4533 * PARAMETERS :
4534 *
4535 * RETURN : NONE
4536 *
4537 *==========================================================================*/
4538void FrameNumberRegistry::purgeOldEntriesLocked()
4539{
4540 while (_register.begin() != _register.end()) {
4541 auto itr = _register.begin();
4542 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4543 _register.erase(itr);
4544 } else {
4545 return;
4546 }
4547 }
4548}
4549
4550/*===========================================================================
4551 * FUNCTION : allocStoreInternalFrameNumber
4552 *
4553 * DESCRIPTION: Method to note down a framework request and associate a new
4554 * internal request number against it
4555 *
4556 * PARAMETERS :
4557 *   @frameworkFrameNumber: Identifier given by the framework
4558 *   @internalFrameNumber : Output parameter which will hold the newly
4559 *                          generated internal frame number
4560 *
4561 * RETURN : Error code
4562 *
4563 *==========================================================================*/
4564int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4565 uint32_t &internalFrameNumber)
4566{
4567 Mutex::Autolock lock(mRegistryLock);
4568 internalFrameNumber = _nextFreeInternalNumber++;
4569 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4570 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4571 purgeOldEntriesLocked();
4572 return NO_ERROR;
4573}
4574
4575/*===========================================================================
4576 * FUNCTION : generateStoreInternalFrameNumber
4577 *
4578 * DESCRIPTION: Method to generate a new internal frame number that is not
4579 *              associated with any framework request
4580 *
4581 * PARAMETERS :
4582 *   @internalFrameNumber: Output parameter holding the newly generated internal frame number
4583 *
4584 *
4585 * RETURN : Error code
4586 *
4587 *==========================================================================*/
4588int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4589{
4590 Mutex::Autolock lock(mRegistryLock);
4591 internalFrameNumber = _nextFreeInternalNumber++;
4592 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4593 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4594 purgeOldEntriesLocked();
4595 return NO_ERROR;
4596}
4597
4598/*===========================================================================
4599 * FUNCTION : getFrameworkFrameNumber
4600 *
4601 * DESCRIPTION: Method to query the framework frame number given an internal one
4602 *
4603 * PARAMETERS :
4604 *   @internalFrameNumber : Internal frame number to look up
4605 *   @frameworkFrameNumber: Output parameter holding the corresponding framework frame number
4606 *
4607 * RETURN : Error code
4608 *
4609 *==========================================================================*/
4610int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4611 uint32_t &frameworkFrameNumber)
4612{
4613 Mutex::Autolock lock(mRegistryLock);
4614 auto itr = _register.find(internalFrameNumber);
4615 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004616 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004617 return -ENOENT;
4618 }
4619
4620 frameworkFrameNumber = itr->second;
4621 purgeOldEntriesLocked();
4622 return NO_ERROR;
4623}
Thierry Strudel3d639192016-09-09 11:52:26 -07004624
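/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 *              info of the given stream index in a channel.
 *
 * PARAMETERS : @config        : pbcamera stream configuration to fill
 *              @pbStreamId    : stream id to assign in the configuration
 *              @pbStreamFormat: pbcamera pixel format of the stream
 *              @channel       : channel that owns the stream
 *              @streamIndex   : index of the stream within the channel
 *
 * RETURN     : OK on success; BAD_VALUE or NAME_NOT_FOUND on error
 *
 *==========================================================================*/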
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004625status_t QCamera3HardwareInterface::fillPbStreamConfig(
4626 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4627 QCamera3Channel *channel, uint32_t streamIndex) {
4628 if (config == nullptr) {
4629 LOGE("%s: config is null", __FUNCTION__);
4630 return BAD_VALUE;
4631 }
4632
4633 if (channel == nullptr) {
4634 LOGE("%s: channel is null", __FUNCTION__);
4635 return BAD_VALUE;
4636 }
4637
4638 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4639 if (stream == nullptr) {
4640 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4641 return NAME_NOT_FOUND;
4642 }
4643
4644 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4645 if (streamInfo == nullptr) {
4646 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4647 return NAME_NOT_FOUND;
4648 }
4649
4650 config->id = pbStreamId;
4651 config->image.width = streamInfo->dim.width;
4652 config->image.height = streamInfo->dim.height;
4653 config->image.padding = 0;
4654 config->image.format = pbStreamFormat;
4655
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004656 uint32_t totalPlaneSize = 0;
4657
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004658 // Fill plane information.
4659 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4660 pbcamera::PlaneConfiguration plane;
4661 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4662 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4663 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004664
4665 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004666 }
4667
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004668 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004669 return OK;
4670}
4671
Thierry Strudel3d639192016-09-09 11:52:26 -07004672/*===========================================================================
4673 * FUNCTION : processCaptureRequest
4674 *
4675 * DESCRIPTION: process a capture request from camera service
4676 *
4677 * PARAMETERS :
4678 * @request : request from framework to process
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004684 camera3_capture_request_t *request,
4685 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004686{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004687 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004688 int rc = NO_ERROR;
4689 int32_t request_id;
4690 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004691 bool isVidBufRequested = false;
4692 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004693 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004694
4695 pthread_mutex_lock(&mMutex);
4696
4697 // Validate current state
4698 switch (mState) {
4699 case CONFIGURED:
4700 case STARTED:
4701 /* valid state */
4702 break;
4703
4704 case ERROR:
4705 pthread_mutex_unlock(&mMutex);
4706 handleCameraDeviceError();
4707 return -ENODEV;
4708
4709 default:
4710 LOGE("Invalid state %d", mState);
4711 pthread_mutex_unlock(&mMutex);
4712 return -ENODEV;
4713 }
4714
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004715 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 if (rc != NO_ERROR) {
4717 LOGE("incoming request is not valid");
4718 pthread_mutex_unlock(&mMutex);
4719 return rc;
4720 }
4721
4722 meta = request->settings;
4723
4724 // For first capture request, send capture intent, and
4725 // stream on all streams
4726 if (mState == CONFIGURED) {
4727 // send an unconfigure to the backend so that the isp
4728 // resources are deallocated
4729 if (!mFirstConfiguration) {
4730 cam_stream_size_info_t stream_config_info;
4731 int32_t hal_version = CAM_HAL_V3;
4732 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4733 stream_config_info.buffer_info.min_buffers =
4734 MIN_INFLIGHT_REQUESTS;
4735 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004736 m_bIs4KVideo ? 0 :
4737 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004738 clear_metadata_buffer(mParameters);
4739 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4740 CAM_INTF_PARM_HAL_VERSION, hal_version);
4741 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4742 CAM_INTF_META_STREAM_INFO, stream_config_info);
4743 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4744 mParameters);
4745 if (rc < 0) {
4746 LOGE("set_parms for unconfigure failed");
4747 pthread_mutex_unlock(&mMutex);
4748 return rc;
4749 }
4750 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004751 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004753 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004754 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004755 property_get("persist.camera.is_type", is_type_value, "4");
4756 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4757 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4758 property_get("persist.camera.is_type_preview", is_type_value, "4");
4759 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4760 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004761
4762 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4763 int32_t hal_version = CAM_HAL_V3;
4764 uint8_t captureIntent =
4765 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4766 mCaptureIntent = captureIntent;
4767 clear_metadata_buffer(mParameters);
4768 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4769 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4770 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004771 if (mFirstConfiguration) {
4772 // configure instant AEC
4773 // Instant AEC is a session based parameter and it is needed only
4774 // once per complete session after open camera.
4775 // i.e. This is set only once for the first capture request, after open camera.
4776 setInstantAEC(meta);
4777 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 uint8_t fwkVideoStabMode=0;
4779 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4780 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4781 }
4782
Xue Tuecac74e2017-04-17 13:58:15 -07004783 // If EIS setprop is enabled then only turn it on for video/preview
4784 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004785 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004786 int32_t vsMode;
4787 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4789 rc = BAD_VALUE;
4790 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004791 LOGD("setEis %d", setEis);
4792 bool eis3Supported = false;
4793 size_t count = IS_TYPE_MAX;
4794 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4795 for (size_t i = 0; i < count; i++) {
4796 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4797 eis3Supported = true;
4798 break;
4799 }
4800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004801
4802 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004803 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4805 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004806 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4807 is_type = isTypePreview;
4808 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4809 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4810 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004812 } else {
4813 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004815 } else {
4816 is_type = IS_TYPE_NONE;
4817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004819 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4821 }
4822 }
4823
4824 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4825 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4826
Thierry Strudel54dc9782017-02-15 12:12:10 -08004827 //Disable tintless only if the property is set to 0
4828 memset(prop, 0, sizeof(prop));
4829 property_get("persist.camera.tintless.enable", prop, "1");
4830 int32_t tintless_value = atoi(prop);
4831
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4833 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 //Disable CDS for HFR mode or if DIS/EIS is on.
4836 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4837 //after every configure_stream
4838 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4839 (m_bIsVideo)) {
4840 int32_t cds = CAM_CDS_MODE_OFF;
4841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4842 CAM_INTF_PARM_CDS_MODE, cds))
4843 LOGE("Failed to disable CDS for HFR mode");
4844
4845 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846
4847 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4848 uint8_t* use_av_timer = NULL;
4849
4850 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004851 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004852 use_av_timer = &m_debug_avtimer;
4853 }
4854 else{
4855 use_av_timer =
4856 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004857 if (use_av_timer) {
4858 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4859 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004860 }
4861
4862        if ((use_av_timer == NULL) || ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4863 rc = BAD_VALUE;
4864 }
4865 }
4866
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 setMobicat();
4868
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004869 uint8_t nrMode = 0;
4870 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4871 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4872 }
4873
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 /* Set fps and hfr mode while sending meta stream info so that sensor
4875 * can configure appropriate streaming mode */
4876 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004877 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4878 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4880 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 if (rc == NO_ERROR) {
4882 int32_t max_fps =
4883 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004884 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004885 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4886 }
4887 /* For HFR, more buffers are dequeued upfront to improve the performance */
4888 if (mBatchSize) {
4889 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4890 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4891 }
4892 }
4893 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 LOGE("setHalFpsRange failed");
4895 }
4896 }
4897 if (meta.exists(ANDROID_CONTROL_MODE)) {
4898 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4899 rc = extractSceneMode(meta, metaMode, mParameters);
4900 if (rc != NO_ERROR) {
4901 LOGE("extractSceneMode failed");
4902 }
4903 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004904 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004905
Thierry Strudel04e026f2016-10-10 11:27:36 -07004906 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4907 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4908 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4909 rc = setVideoHdrMode(mParameters, vhdr);
4910 if (rc != NO_ERROR) {
4911 LOGE("setVideoHDR is failed");
4912 }
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004915 if (meta.exists(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV)) {
4916 uint8_t sensorModeFullFov =
4917 meta.find(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV).data.u8[0];
4918 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4920 sensorModeFullFov)) {
4921 rc = BAD_VALUE;
4922 }
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 //TODO: validate the arguments, HSV scenemode should have only the
4925 //advertised fps ranges
4926
4927        /* Set the capture intent, HAL version, tintless, stream info,
4928         * and DIS enable parameters to the backend */
4929 LOGD("set_parms META_STREAM_INFO " );
4930 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004931 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4932 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004933 mStreamConfigInfo.type[i],
4934 mStreamConfigInfo.stream_sizes[i].width,
4935 mStreamConfigInfo.stream_sizes[i].height,
4936 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 mStreamConfigInfo.format[i],
4938 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4942 mParameters);
4943 if (rc < 0) {
4944 LOGE("set_parms failed for hal version, stream info");
4945 }
4946
Chien-Yu Chenee335912017-02-09 17:53:20 -08004947 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4948 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004949 if (rc != NO_ERROR) {
4950 LOGE("Failed to get sensor output size");
4951 pthread_mutex_unlock(&mMutex);
4952 goto error_exit;
4953 }
4954
4955 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4956 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004957 mSensorModeInfo.active_array_size.width,
4958 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004959
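        // For devices with Easel/HDR+ support: bring up the MIPI interface for
        // this camera at the selected sensor mode's op pixel clock before the
        // channels are started; a failure here aborts the capture request.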
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004960 {
4961 Mutex::Autolock l(gHdrPlusClientLock);
4962 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004963 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004964 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4965 if (rc != OK) {
4966 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4967 mCameraId, mSensorModeInfo.op_pixel_clk);
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004971 }
4972 }
4973
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 /* Set batchmode before initializing channel. Since registerBuffer
4975 * internally initializes some of the channels, better set batchmode
4976 * even before first register buffer */
4977 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4978 it != mStreamInfo.end(); it++) {
4979 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4980 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4981 && mBatchSize) {
4982 rc = channel->setBatchSize(mBatchSize);
4983 //Disable per frame map unmap for HFR/batchmode case
4984 rc |= channel->setPerFrameMapUnmap(false);
4985 if (NO_ERROR != rc) {
4986 LOGE("Channel init failed %d", rc);
4987 pthread_mutex_unlock(&mMutex);
4988 goto error_exit;
4989 }
4990 }
4991 }
4992
4993 //First initialize all streams
4994 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4995 it != mStreamInfo.end(); it++) {
4996 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004997
4998 /* Initial value of NR mode is needed before stream on */
4999 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5001 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005002 setEis) {
5003 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5004 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5005 is_type = mStreamConfigInfo.is_type[i];
5006 break;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005011 rc = channel->initialize(IS_TYPE_NONE);
5012 }
5013 if (NO_ERROR != rc) {
5014 LOGE("Channel initialization failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018 }
5019
5020 if (mRawDumpChannel) {
5021 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5022 if (rc != NO_ERROR) {
5023 LOGE("Error: Raw Dump Channel init failed");
5024 pthread_mutex_unlock(&mMutex);
5025 goto error_exit;
5026 }
5027 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005028 if (mHdrPlusRawSrcChannel) {
5029 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5030 if (rc != NO_ERROR) {
5031 LOGE("Error: HDR+ RAW Source Channel init failed");
5032 pthread_mutex_unlock(&mMutex);
5033 goto error_exit;
5034 }
5035 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 if (mSupportChannel) {
5037 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5038 if (rc < 0) {
5039 LOGE("Support channel initialization failed");
5040 pthread_mutex_unlock(&mMutex);
5041 goto error_exit;
5042 }
5043 }
5044 if (mAnalysisChannel) {
5045 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5046 if (rc < 0) {
5047 LOGE("Analysis channel initialization failed");
5048 pthread_mutex_unlock(&mMutex);
5049 goto error_exit;
5050 }
5051 }
5052 if (mDummyBatchChannel) {
5053 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5054 if (rc < 0) {
5055 LOGE("mDummyBatchChannel setBatchSize failed");
5056 pthread_mutex_unlock(&mMutex);
5057 goto error_exit;
5058 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 if (rc < 0) {
5061 LOGE("mDummyBatchChannel initialization failed");
5062 pthread_mutex_unlock(&mMutex);
5063 goto error_exit;
5064 }
5065 }
5066
5067 // Set bundle info
5068 rc = setBundleInfo();
5069 if (rc < 0) {
5070 LOGE("setBundleInfo failed %d", rc);
5071 pthread_mutex_unlock(&mMutex);
5072 goto error_exit;
5073 }
5074
5075 //update settings from app here
5076 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5077 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5078 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5079 }
5080 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5081 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5082 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5083 }
5084 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5085 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5086 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5087
5088 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5089 (mLinkedCameraId != mCameraId) ) {
5090 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5091 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005092 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005093 goto error_exit;
5094 }
5095 }
5096
5097 // add bundle related cameras
5098 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5099 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005100 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5101 &m_pDualCamCmdPtr->bundle_info;
5102 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 if (mIsDeviceLinked)
5104 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5105 else
5106 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5107
5108 pthread_mutex_lock(&gCamLock);
5109
5110 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5111 LOGE("Dualcam: Invalid Session Id ");
5112 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005113 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 goto error_exit;
5115 }
5116
5117 if (mIsMainCamera == 1) {
5118 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5119 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005120 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005121 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 // related session id should be session id of linked session
5123 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5124 } else {
5125 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5126 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005127 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005128 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5130 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005131 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005132 pthread_mutex_unlock(&gCamLock);
5133
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005134 rc = mCameraHandle->ops->set_dual_cam_cmd(
5135 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005136 if (rc < 0) {
5137 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005138 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005139 goto error_exit;
5140 }
5141 }
5142
5143 //Then start them.
5144 LOGH("Start META Channel");
5145 rc = mMetadataChannel->start();
5146 if (rc < 0) {
5147 LOGE("META channel start failed");
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
5151
5152 if (mAnalysisChannel) {
5153 rc = mAnalysisChannel->start();
5154 if (rc < 0) {
5155 LOGE("Analysis channel start failed");
5156 mMetadataChannel->stop();
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161
5162 if (mSupportChannel) {
5163 rc = mSupportChannel->start();
5164 if (rc < 0) {
5165 LOGE("Support channel start failed");
5166 mMetadataChannel->stop();
5167 /* Although support and analysis are mutually exclusive today
5168                adding it in any case for future-proofing */
5169 if (mAnalysisChannel) {
5170 mAnalysisChannel->stop();
5171 }
5172 pthread_mutex_unlock(&mMutex);
5173 goto error_exit;
5174 }
5175 }
5176 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5177 it != mStreamInfo.end(); it++) {
5178 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5179 LOGH("Start Processing Channel mask=%d",
5180 channel->getStreamTypeMask());
5181 rc = channel->start();
5182 if (rc < 0) {
5183 LOGE("channel start failed");
5184 pthread_mutex_unlock(&mMutex);
5185 goto error_exit;
5186 }
5187 }
5188
5189 if (mRawDumpChannel) {
5190 LOGD("Starting raw dump stream");
5191 rc = mRawDumpChannel->start();
5192 if (rc != NO_ERROR) {
5193 LOGE("Error Starting Raw Dump Channel");
5194 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5195 it != mStreamInfo.end(); it++) {
5196 QCamera3Channel *channel =
5197 (QCamera3Channel *)(*it)->stream->priv;
5198 LOGH("Stopping Processing Channel mask=%d",
5199 channel->getStreamTypeMask());
5200 channel->stop();
5201 }
5202 if (mSupportChannel)
5203 mSupportChannel->stop();
5204 if (mAnalysisChannel) {
5205 mAnalysisChannel->stop();
5206 }
5207 mMetadataChannel->stop();
5208 pthread_mutex_unlock(&mMutex);
5209 goto error_exit;
5210 }
5211 }
5212
5213 if (mChannelHandle) {
5214
5215 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5216 mChannelHandle);
5217 if (rc != NO_ERROR) {
5218 LOGE("start_channel failed %d", rc);
5219 pthread_mutex_unlock(&mMutex);
5220 goto error_exit;
5221 }
5222 }
5223
5224 goto no_error;
5225error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 return rc;
5228no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005229 mWokenUpByDaemon = false;
5230 mPendingLiveRequest = 0;
5231 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 }
5233
Chien-Yu Chenee335912017-02-09 17:53:20 -08005234 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005235 {
5236 Mutex::Autolock l(gHdrPlusClientLock);
5237 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5238 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5239 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5240 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5241 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5242 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005243 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005244 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005245 pthread_mutex_unlock(&mMutex);
5246 return rc;
5247 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005248
5249 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005250 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005251 }
5252
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255
5256 if (mFlushPerf) {
5257 //we cannot accept any requests during flush
5258 LOGE("process_capture_request cannot proceed during flush");
5259 pthread_mutex_unlock(&mMutex);
5260 return NO_ERROR; //should return an error
5261 }
5262
5263 if (meta.exists(ANDROID_REQUEST_ID)) {
5264 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5265 mCurrentRequestId = request_id;
5266 LOGD("Received request with id: %d", request_id);
5267 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5268 LOGE("Unable to find request id field, \
5269 & no previous id available");
5270 pthread_mutex_unlock(&mMutex);
5271 return NAME_NOT_FOUND;
5272 } else {
5273 LOGD("Re-using old request id");
5274 request_id = mCurrentRequestId;
5275 }
5276
5277 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5278 request->num_output_buffers,
5279 request->input_buffer,
5280 frameNumber);
5281 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005282 streamsArray.num_streams = 0;
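    // streamsArray accumulates the backend stream IDs that must produce a buffer
    // for this frame number: the streams requested by the framework, any
    // internally requested streams, and the internal raw dump / HDR+ RAW streams
    // added further below.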
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005284 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 uint32_t snapshotStreamId = 0;
5286 for (size_t i = 0; i < request->num_output_buffers; i++) {
5287 const camera3_stream_buffer_t& output = request->output_buffers[i];
5288 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5289
Emilian Peev7650c122017-01-19 08:24:33 -08005290 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5291 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005292 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 blob_request = 1;
5294 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5295 }
5296
5297 if (output.acquire_fence != -1) {
5298 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5299 close(output.acquire_fence);
5300 if (rc != OK) {
5301 LOGE("sync wait failed %d", rc);
5302 pthread_mutex_unlock(&mMutex);
5303 return rc;
5304 }
5305 }
5306
Emilian Peev0f3c3162017-03-15 12:57:46 +00005307 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5308 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005309 depthRequestPresent = true;
5310 continue;
5311 }
5312
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005313 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005315
5316 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5317 isVidBufRequested = true;
5318 }
5319 }
5320
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005321 //FIXME: Add checks to ensure to dups in validateCaptureRequest
5322 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5323 itr++) {
5324 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5325 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5326 channel->getStreamID(channel->getStreamTypeMask());
5327
5328 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5329 isVidBufRequested = true;
5330 }
5331 }
5332
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005334 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 }
5337 if (blob_request && mRawDumpChannel) {
5338 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
5339 streamsArray.stream_request[streamsArray.num_streams].streamID =
5340 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
5341 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5342 }
5343
5344 {
5345 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5346 // Request a RAW buffer if
5347 // 1. mHdrPlusRawSrcChannel is valid.
5348 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5349 // 3. There is no pending HDR+ request.
5350 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5351 mHdrPlusPendingRequests.size() == 0) {
5352 streamsArray.stream_request[streamsArray.num_streams].streamID =
5353 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5354 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5355 }
5356 }
5357
5358 //extract capture intent
5359 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5360 mCaptureIntent =
5361 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5362 }
5363
5364 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5365 mCacMode =
5366 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5367 }
5368
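// Decide whether this capture can be serviced as an HDR+ request: an HDR+ client
// must be connected, HDR+ mode must be enabled, and the capture intent must be
// STILL_CAPTURE. Otherwise the request follows the regular backend path below.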
5369 bool hdrPlusRequest = false;
5370 HdrPlusPendingRequest pendingHdrPlusRequest = {};
5371
5372 {
5373 Mutex::Autolock l(gHdrPlusClientLock);
5374 // If this request has a still capture intent, try to submit an HDR+ request.
5375 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5376 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5377 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5378 }
5379 }
5380
5381 if (hdrPlusRequest) {
5382 // For a HDR+ request, just set the frame parameters.
5383 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5384 if (rc < 0) {
5385 LOGE("fail to set frame parameters");
5386 pthread_mutex_unlock(&mMutex);
5387 return rc;
5388 }
5389 } else if(request->input_buffer == NULL) {
5390 /* Parse the settings:
5391 * - For every request in NORMAL MODE
5392 * - For every request in HFR mode during preview only case
5393 * - For first request of every batch in HFR mode during video
5394 * recording. In batchmode the same settings except frame number is
5395 * repeated in each request of the batch.
5396 */
5397 if (!mBatchSize ||
5398 (mBatchSize && !isVidBufRequested) ||
5399 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
5400 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5401 if (rc < 0) {
5402 LOGE("fail to set frame parameters");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
5406 }
5407 /* For batchMode HFR, setFrameParameters is not called for every
5408 * request. But only frame number of the latest request is parsed.
5409 * Keep track of first and last frame numbers in a batch so that
5410 * metadata for the frame numbers of batch can be duplicated in
5411 * handleBatchMetadata */
5412 if (mBatchSize) {
5413 if (!mToBeQueuedVidBufs) {
5414 //start of the batch
5415 mFirstFrameNumberInBatch = request->frame_number;
5416 }
5417 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5418 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5419 LOGE("Failed to set the frame number in the parameters");
5420 pthread_mutex_unlock(&mMutex);
5421 return BAD_VALUE;
5422 }
5423 }
5424 if (mNeedSensorRestart) {
5425 /* Unlock the mutex as restartSensor waits on the channels to be
5426 * stopped, which in turn calls stream callback functions -
5427 * handleBufferWithLock and handleMetadataWithLock */
5428 pthread_mutex_unlock(&mMutex);
5429 rc = dynamicUpdateMetaStreamInfo();
5430 if (rc != NO_ERROR) {
5431 LOGE("Restarting the sensor failed");
5432 return BAD_VALUE;
5433 }
5434 mNeedSensorRestart = false;
5435 pthread_mutex_lock(&mMutex);
5436 }
5437 if(mResetInstantAEC) {
5438 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5439 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5440 mResetInstantAEC = false;
5441 }
5442 } else {
5443 if (request->input_buffer->acquire_fence != -1) {
5444 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5445 close(request->input_buffer->acquire_fence);
5446 if (rc != OK) {
5447 LOGE("input buffer sync wait failed %d", rc);
5448 pthread_mutex_unlock(&mMutex);
5449 return rc;
5450 }
5451 }
5452 }
5453
5454 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5455 mLastCustIntentFrmNum = frameNumber;
5456 }
5457 /* Update pending request list and pending buffers map */
5458 PendingRequestInfo pendingRequest = {};
5459 pendingRequestIterator latestRequest;
5460 pendingRequest.frame_number = frameNumber;
5461 pendingRequest.num_buffers = depthRequestPresent ?
5462 (request->num_output_buffers - 1) : request->num_output_buffers;
5463 pendingRequest.request_id = request_id;
5464 pendingRequest.blob_request = blob_request;
5465 pendingRequest.timestamp = 0;
5466 pendingRequest.bUrgentReceived = 0;
5467 if (request->input_buffer) {
5468 pendingRequest.input_buffer =
5469 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5470 *(pendingRequest.input_buffer) = *(request->input_buffer);
5471 pInputBuffer = pendingRequest.input_buffer;
5472 } else {
5473 pendingRequest.input_buffer = NULL;
5474 pInputBuffer = NULL;
5475 }
5476
5477 pendingRequest.pipeline_depth = 0;
5478 pendingRequest.partial_result_cnt = 0;
5479 extractJpegMetadata(mCurJpegMeta, request);
5480 pendingRequest.jpegMetadata = mCurJpegMeta;
5481 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5482 pendingRequest.shutter_notified = false;
5483 pendingRequest.capture_intent = mCaptureIntent;
5484 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5485 mHybridAeEnable =
5486 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5487 }
5488
5489 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5490 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
5491 /* DevCamDebug metadata processCaptureRequest */
5492 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5493 mDevCamDebugMetaEnable =
5494 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5495 }
5496 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5497 /* DevCamDebug metadata end */
5498
5499 //extract CAC info
5500 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5501 mCacMode =
5502 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5503 }
5504 pendingRequest.fwkCacMode = mCacMode;
5505 pendingRequest.hdrplus = hdrPlusRequest;
5506
5507 // extract enableZsl info
5508 if (gExposeEnableZslKey) {
5509 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5510 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5511 mZslEnabled = pendingRequest.enableZsl;
5512 } else {
5513 pendingRequest.enableZsl = mZslEnabled;
5514 }
5515 }
5516
5517 PendingBuffersInRequest bufsForCurRequest;
5518 bufsForCurRequest.frame_number = frameNumber;
5519 // Mark current timestamp for the new request
5520 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
5521 bufsForCurRequest.hdrplus = hdrPlusRequest;
5522
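// For an HDR+ request the regular backend path is bypassed: keep a copy of the
// current HAL parameters and queue the request by frame number so it can be
// completed when the HDR+ client returns the processed capture.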
5523 if (hdrPlusRequest) {
5524 // Save settings for this request.
5525 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5526 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5527
5528 // Add to pending HDR+ request queue.
5529 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5530 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5531
5532 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5533 }
5534
5535 for (size_t i = 0; i < request->num_output_buffers; i++) {
5536 if ((request->output_buffers[i].stream->data_space ==
5537 HAL_DATASPACE_DEPTH) &&
5538 (HAL_PIXEL_FORMAT_BLOB ==
5539 request->output_buffers[i].stream->format)) {
5540 continue;
5541 }
5542 RequestedBufferInfo requestedBuf;
5543 memset(&requestedBuf, 0, sizeof(requestedBuf));
5544 requestedBuf.stream = request->output_buffers[i].stream;
5545 requestedBuf.buffer = NULL;
5546 pendingRequest.buffers.push_back(requestedBuf);
5547
5548 // Add to buffer handle the pending buffers list
5549 PendingBufferInfo bufferInfo;
5550 bufferInfo.buffer = request->output_buffers[i].buffer;
5551 bufferInfo.stream = request->output_buffers[i].stream;
5552 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5553 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5554 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5555 frameNumber, bufferInfo.buffer,
5556 channel->getStreamTypeMask(), bufferInfo.stream->format);
5557 }
5558 // Add this request packet into mPendingBuffersMap
5559 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5560 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5561 mPendingBuffersMap.get_num_overall_buffers());
5562
5563 latestRequest = mPendingRequestsList.insert(
5564 mPendingRequestsList.end(), pendingRequest);
5565 if(mFlush) {
5566 LOGI("mFlush is true");
5567 pthread_mutex_unlock(&mMutex);
5568 return NO_ERROR;
5569 }
5570
5571 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5572 // channel.
5573 if (!hdrPlusRequest) {
5574 int indexUsed;
5575 // Notify metadata channel we receive a request
5576 mMetadataChannel->request(NULL, frameNumber, indexUsed);
5577
5578 if(request->input_buffer != NULL){
5579 LOGD("Input request, frame_number %d", frameNumber);
5580 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5581 if (NO_ERROR != rc) {
5582 LOGE("fail to set reproc parameters");
5583 pthread_mutex_unlock(&mMutex);
5584 return rc;
5585 }
5586 }
5587
5588 // Call request on other streams
5589 uint32_t streams_need_metadata = 0;
5590 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5591 for (size_t i = 0; i < request->num_output_buffers; i++) {
5592 const camera3_stream_buffer_t& output = request->output_buffers[i];
5593 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5594
5595 if (channel == NULL) {
5596 LOGW("invalid channel pointer for stream");
5597 continue;
5598 }
5599
5600 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5601 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5602 output.buffer, request->input_buffer, frameNumber);
5603 if(request->input_buffer != NULL){
5604 rc = channel->request(output.buffer, frameNumber,
5605 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5606 if (rc < 0) {
5607 LOGE("Fail to request on picture channel");
5608 pthread_mutex_unlock(&mMutex);
5609 return rc;
5610 }
5611 } else {
5612 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5613 assert(NULL != mDepthChannel);
5614 assert(mDepthChannel == output.stream->priv);
5615
5616 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5617 if (rc < 0) {
5618 LOGE("Fail to map on depth buffer");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
5621 }
5622 } else {
5623 LOGD("snapshot request with buffer %p, frame_number %d",
5624 output.buffer, frameNumber);
5625 if (!request->settings) {
5626 rc = channel->request(output.buffer, frameNumber,
5627 NULL, mPrevParameters, indexUsed);
5628 } else {
5629 rc = channel->request(output.buffer, frameNumber,
5630 NULL, mParameters, indexUsed);
5631 }
5632 if (rc < 0) {
5633 LOGE("Fail to request on picture channel");
5634 pthread_mutex_unlock(&mMutex);
5635 return rc;
5636 }
5637
5638 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5639 uint32_t j = 0;
5640 for (j = 0; j < streamsArray.num_streams; j++) {
5641 if (streamsArray.stream_request[j].streamID == streamId) {
5642 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5643 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5644 else
5645 streamsArray.stream_request[j].buf_index = indexUsed;
5646 break;
5647 }
5648 }
5649 if (j == streamsArray.num_streams) {
5650 LOGE("Did not find matching stream to update index");
5651 assert(0);
5652 }
5653
5654 pendingBufferIter->need_metadata = true;
5655 streams_need_metadata++;
5656 }
5657 }
5658 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5659 bool needMetadata = false;
5660 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5661 rc = yuvChannel->request(output.buffer, frameNumber,
5662 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5663 needMetadata, indexUsed, false, false);
5664 if (rc < 0) {
5665 LOGE("Fail to request on YUV channel");
5666 pthread_mutex_unlock(&mMutex);
5667 return rc;
5668 }
5669
5670 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5671 uint32_t j = 0;
5672 for (j = 0; j < streamsArray.num_streams; j++) {
5673 if (streamsArray.stream_request[j].streamID == streamId) {
5674 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5675 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5676 else
5677 streamsArray.stream_request[j].buf_index = indexUsed;
5678 break;
5679 }
5680 }
5681 if (j == streamsArray.num_streams) {
5682 LOGE("Did not find matching stream to update index");
5683 assert(0);
5684 }
5685
5686 pendingBufferIter->need_metadata = needMetadata;
5687 if (needMetadata)
5688 streams_need_metadata += 1;
5689 LOGD("calling YUV channel request, need_metadata is %d",
5690 needMetadata);
5691 } else {
5692 LOGD("request with buffer %p, frame_number %d",
5693 output.buffer, frameNumber);
5694
5695 rc = channel->request(output.buffer, frameNumber, indexUsed);
5696
5697 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5698 uint32_t j = 0;
5699 for (j = 0; j < streamsArray.num_streams; j++) {
5700 if (streamsArray.stream_request[j].streamID == streamId) {
5701 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5702 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5703 else
5704 streamsArray.stream_request[j].buf_index = indexUsed;
5705 break;
5706 }
5707 }
5708 if (j == streamsArray.num_streams) {
5709 LOGE("Did not find matching stream to update index");
5710 assert(0);
5711 }
5712
5713 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5714 && mBatchSize) {
5715 mToBeQueuedVidBufs++;
5716 if (mToBeQueuedVidBufs == mBatchSize) {
5717 channel->queueBatchBuf();
5718 }
5719 }
5720 if (rc < 0) {
5721 LOGE("request failed");
5722 pthread_mutex_unlock(&mMutex);
5723 return rc;
5724 }
5725 }
5726 pendingBufferIter++;
5727 }
5728
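// Issue requests for streams that were requested internally by the HAL (no
// framework buffer attached); meteringOnly requests do not need a metadata
// buffer of their own.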
5729 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5730 itr++) {
5731 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5732
5733 if (channel == NULL) {
5734 LOGE("invalid channel pointer for stream");
5735 assert(0);
5736 return BAD_VALUE;
5737 }
5738
5739 InternalRequest requestedStream;
5740 requestedStream = (*itr);
5741
5742
5743 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5744 LOGD("snapshot request internally input buffer %p, frame_number %d",
5745 request->input_buffer, frameNumber);
5746 if(request->input_buffer != NULL){
5747 rc = channel->request(NULL, frameNumber,
5748 pInputBuffer, &mReprocMeta, indexUsed, true,
5749 requestedStream.meteringOnly);
5750 if (rc < 0) {
5751 LOGE("Fail to request on picture channel");
5752 pthread_mutex_unlock(&mMutex);
5753 return rc;
5754 }
5755 } else {
5756 LOGD("snapshot request with frame_number %d", frameNumber);
5757 if (!request->settings) {
5758 rc = channel->request(NULL, frameNumber,
5759 NULL, mPrevParameters, indexUsed, true,
5760 requestedStream.meteringOnly);
5761 } else {
5762 rc = channel->request(NULL, frameNumber,
5763 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5764 }
5765 if (rc < 0) {
5766 LOGE("Fail to request on picture channel");
5767 pthread_mutex_unlock(&mMutex);
5768 return rc;
5769 }
5770
5771 if ((*itr).meteringOnly != 1) {
5772 requestedStream.need_metadata = 1;
5773 streams_need_metadata++;
5774 }
5775 }
5776
5777 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5778 uint32_t j = 0;
5779 for (j = 0; j < streamsArray.num_streams; j++) {
5780 if (streamsArray.stream_request[j].streamID == streamId) {
5781 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5782 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5783 else
5784 streamsArray.stream_request[j].buf_index = indexUsed;
5785 break;
5786 }
5787 }
5788 if (j == streamsArray.num_streams) {
5789 LOGE("Did not find matching stream to update index");
5790 assert(0);
5791 }
5792
5793 } else {
5794 LOGE("Internal requests not supported on this stream type");
5795 assert(0);
5796 return INVALID_OPERATION;
5797 }
5798 latestRequest->internalRequestList.push_back(requestedStream);
5799 }
5800
5801 //If 2 streams have need_metadata set to true, fail the request, unless
5802 //we copy/reference count the metadata buffer
5803 if (streams_need_metadata > 1) {
5804 LOGE("not supporting request in which two streams requires"
5805 " 2 HAL metadata for reprocessing");
5806 pthread_mutex_unlock(&mMutex);
5807 return -EINVAL;
5808 }
5809
5810 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5812 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5813 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5814 pthread_mutex_unlock(&mMutex);
5815 return BAD_VALUE;
5816 }
5817 if (request->input_buffer == NULL) {
5818 /* Set the parameters to backend:
5819 * - For every request in NORMAL MODE
5820 * - For every request in HFR mode during preview only case
5821 * - Once every batch in HFR mode during video recording
5822 */
5823 if (!mBatchSize ||
5824 (mBatchSize && !isVidBufRequested) ||
5825 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5826 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5827 mBatchSize, isVidBufRequested,
5828 mToBeQueuedVidBufs);
5829
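// HFR batch mode: accumulate the per-request stream lists into
// mBatchedStreamsArray and send the union to the backend once per batch.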
5830 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5831 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5832 uint32_t m = 0;
5833 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5834 if (streamsArray.stream_request[k].streamID ==
5835 mBatchedStreamsArray.stream_request[m].streamID)
5836 break;
5837 }
5838 if (m == mBatchedStreamsArray.num_streams) {
5839 mBatchedStreamsArray.stream_request\
5840 [mBatchedStreamsArray.num_streams].streamID =
5841 streamsArray.stream_request[k].streamID;
5842 mBatchedStreamsArray.stream_request\
5843 [mBatchedStreamsArray.num_streams].buf_index =
5844 streamsArray.stream_request[k].buf_index;
5845 mBatchedStreamsArray.num_streams =
5846 mBatchedStreamsArray.num_streams + 1;
5847 }
5848 }
5849 streamsArray = mBatchedStreamsArray;
5850 }
5851 /* Update stream id of all the requested buffers */
5852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5853 streamsArray)) {
5854 LOGE("Failed to set stream type mask in the parameters");
5855 return BAD_VALUE;
5856 }
5857
5858 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5859 mParameters);
5860 if (rc < 0) {
5861 LOGE("set_parms failed");
5862 }
5863 /* reset to zero because the batch is queued */
5864 mToBeQueuedVidBufs = 0;
5865 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5866 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5867 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
5868 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5869 uint32_t m = 0;
5870 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5871 if (streamsArray.stream_request[k].streamID ==
5872 mBatchedStreamsArray.stream_request[m].streamID)
5873 break;
5874 }
5875 if (m == mBatchedStreamsArray.num_streams) {
5876 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5877 streamID = streamsArray.stream_request[k].streamID;
5878 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5879 buf_index = streamsArray.stream_request[k].buf_index;
5880 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5881 }
5882 }
5883 }
5884 mPendingLiveRequest++;
5885 }
5886 }
5887
5888 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5889
5890 mState = STARTED;
5891 // Added a timed condition wait
5892 struct timespec ts;
5893 uint8_t isValidTimeout = 1;
5894 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
5895 if (rc < 0) {
5896 isValidTimeout = 0;
5897 LOGE("Error reading the real time clock!!");
5898 }
5899 else {
5900 // Make timeout as 5 sec for request to be honored
5901 int64_t timeout = 5;
5902 {
5903 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5904 // If there is a pending HDR+ request, the following requests may be blocked until the
5905 // HDR+ request is done. So allow a longer timeout.
5906 if (mHdrPlusPendingRequests.size() > 0) {
5907 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5908 }
5909 }
5910 ts.tv_sec += timeout;
5911 }
5912 //Block on conditional variable
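// Throttle the framework: block here while the number of in-flight requests is
// at or above mMinInFlightRequests, unless this is a reprocess (input buffer)
// request or the device has hit an error state.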
5913 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
5914 (mState != ERROR) && (mState != DEINIT)) {
5915 if (!isValidTimeout) {
5916 LOGD("Blocking on conditional wait");
5917 pthread_cond_wait(&mRequestCond, &mMutex);
5918 }
5919 else {
5920 LOGD("Blocking on timed conditional wait");
5921 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5922 if (rc == ETIMEDOUT) {
5923 rc = -ENODEV;
5924 LOGE("Unblocked on timeout!!!!");
5925 break;
5926 }
5927 }
5928 LOGD("Unblocked");
5929 if (mWokenUpByDaemon) {
5930 mWokenUpByDaemon = false;
5931 if (mPendingLiveRequest < mMaxInFlightRequests)
5932 break;
5933 }
5934 }
5935 pthread_mutex_unlock(&mMutex);
5936
5937 return rc;
5938}
5939
5940/*===========================================================================
5941 * FUNCTION : dump
5942 *
5943 * DESCRIPTION:
5944 *
5945 * PARAMETERS :
5946 *
5947 *
5948 * RETURN :
5949 *==========================================================================*/
5950void QCamera3HardwareInterface::dump(int fd)
5951{
5952 pthread_mutex_lock(&mMutex);
5953 dprintf(fd, "\n Camera HAL3 information Begin \n");
5954
5955 dprintf(fd, "\nNumber of pending requests: %zu \n",
5956 mPendingRequestsList.size());
5957 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5958 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5959 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5960 for(pendingRequestIterator i = mPendingRequestsList.begin();
5961 i != mPendingRequestsList.end(); i++) {
5962 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5963 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5964 i->input_buffer);
5965 }
5966 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5967 mPendingBuffersMap.get_num_overall_buffers());
5968 dprintf(fd, "-------+------------------\n");
5969 dprintf(fd, " Frame | Stream type mask \n");
5970 dprintf(fd, "-------+------------------\n");
5971 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5972 for(auto &j : req.mPendingBufferList) {
5973 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5974 dprintf(fd, " %5d | %11d \n",
5975 req.frame_number, channel->getStreamTypeMask());
5976 }
5977 }
5978 dprintf(fd, "-------+------------------\n");
5979
5980 dprintf(fd, "\nPending frame drop list: %zu\n",
5981 mPendingFrameDropList.size());
5982 dprintf(fd, "-------+-----------\n");
5983 dprintf(fd, " Frame | Stream ID \n");
5984 dprintf(fd, "-------+-----------\n");
5985 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5986 i != mPendingFrameDropList.end(); i++) {
5987 dprintf(fd, " %5d | %9d \n",
5988 i->frame_number, i->stream_ID);
5989 }
5990 dprintf(fd, "-------+-----------\n");
5991
5992 dprintf(fd, "\n Camera HAL3 information End \n");
5993
5994 /* use dumpsys media.camera as trigger to send update debug level event */
5995 mUpdateDebugLevel = true;
5996 pthread_mutex_unlock(&mMutex);
5997 return;
5998}
5999
6000/*===========================================================================
6001 * FUNCTION : flush
6002 *
6003 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6004 * conditionally restarts channels
6005 *
6006 * PARAMETERS :
6007 * @ restartChannels: re-start all channels
6008 *
6009 *
6010 * RETURN :
6011 * 0 on success
6012 * Error code on failure
6013 *==========================================================================*/
6014int QCamera3HardwareInterface::flush(bool restartChannels)
6015{
6016 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6017 int32_t rc = NO_ERROR;
6018
6019 LOGD("Unblocking Process Capture Request");
6020 pthread_mutex_lock(&mMutex);
6021 mFlush = true;
6022 pthread_mutex_unlock(&mMutex);
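// With mFlush set, any concurrent processCaptureRequest returns early; stop the
// channels, return errors for everything still pending, then optionally restart.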
6023
6024 rc = stopAllChannels();
6025 // unlink of dualcam
6026 if (mIsDeviceLinked) {
6027 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6028 &m_pDualCamCmdPtr->bundle_info;
6029 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
6030 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6031 pthread_mutex_lock(&gCamLock);
6032
6033 if (mIsMainCamera == 1) {
6034 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6035 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
6036 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6037 // related session id should be session id of linked session
6038 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6039 } else {
6040 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6041 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
6042 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
6043 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6044 }
6045 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
6046 pthread_mutex_unlock(&gCamLock);
6047
6048 rc = mCameraHandle->ops->set_dual_cam_cmd(
6049 mCameraHandle->camera_handle);
6050 if (rc < 0) {
6051 LOGE("Dualcam: Unlink failed, but still proceed to close");
6052 }
6053 }
6054
6055 if (rc < 0) {
6056 LOGE("stopAllChannels failed");
6057 return rc;
6058 }
6059 if (mChannelHandle) {
6060 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6061 mChannelHandle);
6062 }
6063
6064 // Reset bundle info
6065 rc = setBundleInfo();
6066 if (rc < 0) {
6067 LOGE("setBundleInfo failed %d", rc);
6068 return rc;
6069 }
6070
6071 // Mutex Lock
6072 pthread_mutex_lock(&mMutex);
6073
6074 // Unblock process_capture_request
6075 mPendingLiveRequest = 0;
6076 pthread_cond_signal(&mRequestCond);
6077
6078 rc = notifyErrorForPendingRequests();
6079 if (rc < 0) {
6080 LOGE("notifyErrorForPendingRequests failed");
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
6084
6085 mFlush = false;
6086
6087 // Start the Streams/Channels
6088 if (restartChannels) {
6089 rc = startAllChannels();
6090 if (rc < 0) {
6091 LOGE("startAllChannels failed");
6092 pthread_mutex_unlock(&mMutex);
6093 return rc;
6094 }
6095 if (mChannelHandle) {
6096 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6097 mChannelHandle);
6098 if (rc < 0) {
6099 LOGE("start_channel failed");
6100 pthread_mutex_unlock(&mMutex);
6101 return rc;
6102 }
6103 }
6104 }
6105 pthread_mutex_unlock(&mMutex);
6106
6107 return 0;
6108}
6109
6110/*===========================================================================
6111 * FUNCTION : flushPerf
6112 *
6113 * DESCRIPTION: This is the performance optimization version of flush that does
6114 * not use stream off, rather flushes the system
6115 *
6116 * PARAMETERS :
6117 *
6118 *
6119 * RETURN : 0 : success
6120 * -EINVAL: input is malformed (device is not valid)
6121 * -ENODEV: if the device has encountered a serious error
6122 *==========================================================================*/
6123int QCamera3HardwareInterface::flushPerf()
6124{
6125 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
6126 int32_t rc = 0;
6127 struct timespec timeout;
6128 bool timed_wait = false;
6129
6130 pthread_mutex_lock(&mMutex);
6131 mFlushPerf = true;
6132 mPendingBuffersMap.numPendingBufsAtFlush =
6133 mPendingBuffersMap.get_num_overall_buffers();
6134 LOGD("Calling flush. Wait for %d buffers to return",
6135 mPendingBuffersMap.numPendingBufsAtFlush);
6136
6137 /* send the flush event to the backend */
6138 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6139 if (rc < 0) {
6140 LOGE("Error in flush: IOCTL failure");
6141 mFlushPerf = false;
6142 pthread_mutex_unlock(&mMutex);
6143 return -ENODEV;
6144 }
6145
6146 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6147 LOGD("No pending buffers in HAL, return flush");
6148 mFlushPerf = false;
6149 pthread_mutex_unlock(&mMutex);
6150 return rc;
6151 }
6152
6153 /* wait on a signal that buffers were received */
6154 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
6155 if (rc < 0) {
6156 LOGE("Error reading the real time clock, cannot use timed wait");
6157 } else {
6158 timeout.tv_sec += FLUSH_TIMEOUT;
6159 timed_wait = true;
6160 }
6161
6162 //Block on conditional variable
6163 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6164 LOGD("Waiting on mBuffersCond");
6165 if (!timed_wait) {
6166 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6167 if (rc != 0) {
6168 LOGE("pthread_cond_wait failed due to rc = %s",
6169 strerror(rc));
6170 break;
6171 }
6172 } else {
6173 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6174 if (rc != 0) {
6175 LOGE("pthread_cond_timedwait failed due to rc = %s",
6176 strerror(rc));
6177 break;
6178 }
6179 }
6180 }
6181 if (rc != 0) {
6182 mFlushPerf = false;
6183 pthread_mutex_unlock(&mMutex);
6184 return -ENODEV;
6185 }
6186
6187 LOGD("Received buffers, now safe to return them");
6188
6189 //make sure the channels handle flush
6190 //currently only required for the picture channel to release snapshot resources
6191 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6192 it != mStreamInfo.end(); it++) {
6193 QCamera3Channel *channel = (*it)->channel;
6194 if (channel) {
6195 rc = channel->flush();
6196 if (rc) {
6197 LOGE("Flushing the channels failed with error %d", rc);
6198 // even though the channel flush failed we need to continue and
6199 // return the buffers we have to the framework, however the return
6200 // value will be an error
6201 rc = -ENODEV;
6202 }
6203 }
6204 }
6205
6206 /* notify the frameworks and send errored results */
6207 rc = notifyErrorForPendingRequests();
6208 if (rc < 0) {
6209 LOGE("notifyErrorForPendingRequests failed");
6210 pthread_mutex_unlock(&mMutex);
6211 return rc;
6212 }
6213
6214 //unblock process_capture_request
6215 mPendingLiveRequest = 0;
6216 unblockRequestIfNecessary();
6217
6218 mFlushPerf = false;
6219 pthread_mutex_unlock(&mMutex);
6220 LOGD ("Flush Operation complete. rc = %d", rc);
6221 return rc;
6222}
6223
6224/*===========================================================================
6225 * FUNCTION : handleCameraDeviceError
6226 *
6227 * DESCRIPTION: This function calls internal flush and notifies the error to
6228 * framework and updates the state variable.
6229 *
6230 * PARAMETERS : None
6231 *
6232 * RETURN : NO_ERROR on Success
6233 * Error code on failure
6234 *==========================================================================*/
6235int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6236{
6237 int32_t rc = NO_ERROR;
6238
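// Take mFlushLock for the internal flush below; if the state is no longer ERROR
// there is nothing to do.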
6239 {
6240 Mutex::Autolock lock(mFlushLock);
6241 pthread_mutex_lock(&mMutex);
6242 if (mState != ERROR) {
6243 //if mState != ERROR, nothing to be done
6244 pthread_mutex_unlock(&mMutex);
6245 return NO_ERROR;
6246 }
6247 pthread_mutex_unlock(&mMutex);
6248
6249 rc = flush(false /* restart channels */);
6250 if (NO_ERROR != rc) {
6251 LOGE("internal flush to handle mState = ERROR failed");
6252 }
6253
6254 pthread_mutex_lock(&mMutex);
6255 mState = DEINIT;
6256 pthread_mutex_unlock(&mMutex);
6257 }
6258
6259 camera3_notify_msg_t notify_msg;
6260 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6261 notify_msg.type = CAMERA3_MSG_ERROR;
6262 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6263 notify_msg.message.error.error_stream = NULL;
6264 notify_msg.message.error.frame_number = 0;
6265 orchestrateNotify(&notify_msg);
6266
6267 return rc;
6268}
6269
6270/*===========================================================================
6271 * FUNCTION : captureResultCb
6272 *
6273 * DESCRIPTION: Callback handler for all capture result
6274 * (streams, as well as metadata)
6275 *
6276 * PARAMETERS :
6277 * @metadata : metadata information
6278 * @buffer : actual gralloc buffer to be returned to frameworks.
6279 * NULL if metadata.
6280 *
6281 * RETURN : NONE
6282 *==========================================================================*/
6283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6284 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6285{
6286 if (metadata_buf) {
6287 pthread_mutex_lock(&mMutex);
6288 uint8_t batchSize = mBatchSize;
6289 pthread_mutex_unlock(&mMutex);
6290 if (batchSize) {
6291 handleBatchMetadata(metadata_buf,
6292 true /* free_and_bufdone_meta_buf */);
6293 } else { /* mBatchSize = 0 */
6294 hdrPlusPerfLock(metadata_buf);
6295 pthread_mutex_lock(&mMutex);
6296 handleMetadataWithLock(metadata_buf,
6297 true /* free_and_bufdone_meta_buf */,
6298 true /* last urgent frame of batch metadata */,
6299 true /* last frame of batch metadata */,
6300 NULL);
6301 pthread_mutex_unlock(&mMutex);
6302 }
6303 } else if (isInputBuffer) {
6304 pthread_mutex_lock(&mMutex);
6305 handleInputBufferWithLock(frame_number);
6306 pthread_mutex_unlock(&mMutex);
6307 } else {
6308 pthread_mutex_lock(&mMutex);
6309 handleBufferWithLock(buffer, frame_number);
6310 pthread_mutex_unlock(&mMutex);
6311 }
6312 return;
6313}
6314
6315/*===========================================================================
6316 * FUNCTION : getReprocessibleOutputStreamId
6317 *
6318 * DESCRIPTION: Get source output stream id for the input reprocess stream
6319 * based on size and format, which would be the largest
6320 * output stream if an input stream exists.
6321 *
6322 * PARAMETERS :
6323 * @id : return the stream id if found
6324 *
6325 * RETURN : int32_t type of status
6326 * NO_ERROR -- success
6327 * non-zero failure code
6328 *==========================================================================*/
6329int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6330{
6331 /* check if any output or bidirectional stream with the same size and format
6332 and return that stream */
6333 if ((mInputStreamInfo.dim.width > 0) &&
6334 (mInputStreamInfo.dim.height > 0)) {
6335 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6336 it != mStreamInfo.end(); it++) {
6337
6338 camera3_stream_t *stream = (*it)->stream;
6339 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6340 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6341 (stream->format == mInputStreamInfo.format)) {
6342 // Usage flag for an input stream and the source output stream
6343 // may be different.
6344 LOGD("Found reprocessible output stream! %p", *it);
6345 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6346 stream->usage, mInputStreamInfo.usage);
6347
6348 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6349 if (channel != NULL && channel->mStreams[0]) {
6350 id = channel->mStreams[0]->getMyServerID();
6351 return NO_ERROR;
6352 }
6353 }
6354 }
6355 } else {
6356 LOGD("No input stream, so no reprocessible output stream");
6357 }
6358 return NAME_NOT_FOUND;
6359}
6360
6361/*===========================================================================
6362 * FUNCTION : lookupFwkName
6363 *
6364 * DESCRIPTION: In case the enum is not same in fwk and backend
6365 * make sure the parameter is correctly propagated
6366 *
6367 * PARAMETERS :
6368 * @arr : map between the two enums
6369 * @len : len of the map
6370 * @hal_name : name of the hal_parm to map
6371 *
6372 * RETURN : int type of status
6373 * fwk_name -- success
6374 * non-zero failure code
6375 *==========================================================================*/
6376template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6377 size_t len, halType hal_name)
6378{
6379
6380 for (size_t i = 0; i < len; i++) {
6381 if (arr[i].hal_name == hal_name) {
6382 return arr[i].fwk_name;
6383 }
6384 }
6385
6386 /* Not able to find matching framework type is not necessarily
6387 * an error case. This happens when mm-camera supports more attributes
6388 * than the frameworks do */
6389 LOGH("Cannot find matching framework type");
6390 return NAME_NOT_FOUND;
6391}
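// Typical usage (illustrative): map a backend enum to its framework counterpart,
// e.g. lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP), halEffect)
// returns the matching ANDROID_CONTROL_EFFECT_MODE_* value, or NAME_NOT_FOUND.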
6392
6393/*===========================================================================
6394 * FUNCTION : lookupHalName
6395 *
6396 * DESCRIPTION: In case the enum is not same in fwk and backend
6397 * make sure the parameter is correctly propagated
6398 *
6399 * PARAMETERS :
6400 * @arr : map between the two enums
6401 * @len : len of the map
6402 * @fwk_name : name of the hal_parm to map
6403 *
6404 * RETURN : int32_t type of status
6405 * hal_name -- success
6406 * non-zero failure code
6407 *==========================================================================*/
6408template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6409 size_t len, fwkType fwk_name)
6410{
6411 for (size_t i = 0; i < len; i++) {
6412 if (arr[i].fwk_name == fwk_name) {
6413 return arr[i].hal_name;
6414 }
6415 }
6416
6417 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6418 return NAME_NOT_FOUND;
6419}
6420
6421/*===========================================================================
6422 * FUNCTION : lookupProp
6423 *
6424 * DESCRIPTION: lookup a value by its name
6425 *
6426 * PARAMETERS :
6427 * @arr : map between the two enums
6428 * @len : size of the map
6429 * @name : name to be looked up
6430 *
6431 * RETURN : Value if found
6432 * CAM_CDS_MODE_MAX if not found
6433 *==========================================================================*/
6434template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6435 size_t len, const char *name)
6436{
6437 if (name) {
6438 for (size_t i = 0; i < len; i++) {
6439 if (!strcmp(arr[i].desc, name)) {
6440 return arr[i].val;
6441 }
6442 }
6443 }
6444 return CAM_CDS_MODE_MAX;
6445}
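// Typical usage (illustrative): map a property string to a backend enum, e.g.
// lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop) where prop holds the value
// of a persist.camera.* setting; CAM_CDS_MODE_MAX signals "not found".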
6446
6447/*===========================================================================
6448 * FUNCTION   : translateFromHalMetadata
6449 * DESCRIPTION:
6450 *
6451 * PARAMETERS :
6452 * @metadata : metadata information from callback
6453 * @timestamp: metadata buffer timestamp
6454 * @request_id: request id
6455 * @jpegMetadata: additional jpeg metadata
6456 * @hybrid_ae_enable: whether hybrid ae is enabled
6457 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6458 * // DevCamDebug metadata end
6459 * @pprocDone: whether internal offline postprocessing is done
6460 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6461 * in a batch. Always true for non-batch mode.
6462 *
6463 * RETURN : camera_metadata_t*
6464 * metadata in a format specified by fwk
6465 *==========================================================================*/
6466camera_metadata_t*
6467QCamera3HardwareInterface::translateFromHalMetadata(
6468 metadata_buffer_t *metadata,
6469 nsecs_t timestamp,
6470 int32_t request_id,
6471 const CameraMetadata& jpegMetadata,
6472 uint8_t pipeline_depth,
6473 uint8_t capture_intent,
6474 uint8_t hybrid_ae_enable,
6475 /* DevCamDebug metadata translateFromHalMetadata argument */
6476 uint8_t DevCamDebug_meta_enable,
6477 /* DevCamDebug metadata end */
6478 bool pprocDone,
6479 uint8_t fwk_cacMode,
6480 bool lastMetadataInBatch,
6481 const bool *enableZsl)
6482{
6483 CameraMetadata camMetadata;
6484 camera_metadata_t *resultMetadata;
6485
6486 if (!lastMetadataInBatch) {
6487 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6488 * Timestamp is needed because it's used for shutter notify calculation.
6489 * */
6490 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6491 resultMetadata = camMetadata.release();
6492 return resultMetadata;
6493 }
6494
6495 if (jpegMetadata.entryCount())
6496 camMetadata.append(jpegMetadata);
6497
6498 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6499 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6500 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6501 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
6502 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
6503 if (mBatchSize == 0) {
6504 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6505 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6506 }
6507
6508 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6509 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6510 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6511 // DevCamDebug metadata translateFromHalMetadata AF
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6513 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6514 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6515 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6516 }
6517 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6518 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6519 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6520 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6521 }
6522 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6523 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6524 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6525 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6526 }
6527 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6528 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6529 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6530 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6531 }
6532 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6533 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6534 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6535 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6538 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6540 *DevCamDebug_af_monitor_pdaf_target_pos;
6541 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6542 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6543 }
6544 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6545 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6546 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6547 *DevCamDebug_af_monitor_pdaf_confidence;
6548 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6549 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6550 }
6551 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6552 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6553 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6554 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6555 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6558 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6559 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6560 *DevCamDebug_af_monitor_tof_target_pos;
6561 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6562 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6563 }
6564 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6565 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6566 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6567 *DevCamDebug_af_monitor_tof_confidence;
6568 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6569 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6570 }
6571 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6572 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6573 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6574 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6575 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6576 }
6577 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6578 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6579 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6580 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6581 &fwk_DevCamDebug_af_monitor_type_select, 1);
6582 }
6583 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6584 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6585 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6586 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6587 &fwk_DevCamDebug_af_monitor_refocus, 1);
6588 }
6589 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6590 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6591 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6592 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6593 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6594 }
6595 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6596 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6597 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6598 *DevCamDebug_af_search_pdaf_target_pos;
6599 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6600 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6601 }
6602 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6603 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6604 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6605 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6606 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6607 }
6608 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6609 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6610 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6611 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6612 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6613 }
6614 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6615 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6616 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6617 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6618 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6619 }
6620 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6621 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6622 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6623 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6624 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6627 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6628 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6629 *DevCamDebug_af_search_tof_target_pos;
6630 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6631 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6634 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6635 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6636 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6637 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6638 }
6639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6640 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6641 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6642 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6643 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6644 }
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6646 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6647 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6648 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6649 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6652 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6653 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6654 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6655 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6658 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6659 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6660 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6661 &fwk_DevCamDebug_af_search_type_select, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6664 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6665 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6667 &fwk_DevCamDebug_af_search_next_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6670 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6671 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6673 &fwk_DevCamDebug_af_search_target_pos, 1);
6674 }
6675 // DevCamDebug metadata translateFromHalMetadata AEC
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6677 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6678 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6679 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6682 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6683 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6684 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6687 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6688 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6689 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6692 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6693 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6694 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6697 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6698 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6699 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6700 }
6701 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6702 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6703 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6704 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6707 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6708 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6709 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6710 }
6711 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6712 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6713 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6714 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6715 }
6716 // DevCamDebug metadata translateFromHalMetadata zzHDR
6717 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6718 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6719 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6720 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6723 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
6724 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
6725 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6728 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6729 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6730 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6733 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
6734 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
6735 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6736 }
6737 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6738 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6739 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6740 *DevCamDebug_aec_hdr_sensitivity_ratio;
6741 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6742 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6743 }
6744 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6745 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6746 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6747 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6748 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6749 }
6750 // DevCamDebug metadata translateFromHalMetadata ADRC
6751 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6752 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6753 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6754 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6755 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6756 }
6757 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6758 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6759 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6760 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6761 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6764 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6765 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6766 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6767 }
6768 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6769 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6770 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6771 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6772 }
6773 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6774 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6775 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6776 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6777 }
6778 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6779 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6780 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6781 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6782 }
6783 // DevCamDebug metadata translateFromHalMetadata AWB
6784 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6785 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6786 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6787 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6788 }
6789 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6790 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6791 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6792 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6793 }
6794 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6795 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6796 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6797 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6800 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6801 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6802 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6805 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6806 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6807 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6808 }
6809 }
6810 // atrace_end(ATRACE_TAG_ALWAYS);
6811
6812 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6813 int64_t fwk_frame_number = *frame_number;
6814 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6815 }
6816
6817 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6818 int32_t fps_range[2];
6819 fps_range[0] = (int32_t)float_range->min_fps;
6820 fps_range[1] = (int32_t)float_range->max_fps;
6821 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6822 fps_range, 2);
6823 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6824 fps_range[0], fps_range[1]);
6825 }
6826
6827 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6828 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6829 }
6830
6831 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6832 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6833 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6834 *sceneMode);
6835 if (NAME_NOT_FOUND != val) {
6836 uint8_t fwkSceneMode = (uint8_t)val;
6837 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6838 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6839 fwkSceneMode);
6840 }
6841 }
6842
6843 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6844 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6845 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6846 }
6847
6848 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6849 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6850 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6851 }
6852
6853 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6854 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6855 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6856 }
6857
6858 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6859 CAM_INTF_META_EDGE_MODE, metadata) {
6860 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6861 }
6862
6863 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6864 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6865 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6866 }
6867
6868 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6869 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6870 }
6871
6872 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6873 if (0 <= *flashState) {
6874 uint8_t fwk_flashState = (uint8_t) *flashState;
6875 if (!gCamCapability[mCameraId]->flash_available) {
6876 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6877 }
6878 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6879 }
6880 }
6881
6882 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6883 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6884 if (NAME_NOT_FOUND != val) {
6885 uint8_t fwk_flashMode = (uint8_t)val;
6886 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6887 }
6888 }
6889
6890 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6891 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6892 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6893 }
6894
6895 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6896 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6897 }
6898
6899 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6900 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6901 }
6902
6903 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6904 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6905 }
6906
6907 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6908 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6909 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6910 }
6911
6912 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6913 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6914 LOGD("fwk_videoStab = %d", fwk_videoStab);
6915 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6916 } else {
6917 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6918 // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6919 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6920 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
6921 LOGD("EIS result default to OFF mode");
6922 }
6923
6924 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6925 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6926 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6930 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6931 }
6932
6933 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6934 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
6935 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
6936
6937 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6938 gCamCapability[mCameraId]->color_arrangement);
6939
6940 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
6941 blackLevelAppliedPattern->cam_black_level[0],
6942 blackLevelAppliedPattern->cam_black_level[1],
6943 blackLevelAppliedPattern->cam_black_level[2],
6944 blackLevelAppliedPattern->cam_black_level[3]);
6945 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6946 BLACK_LEVEL_PATTERN_CNT);
6947
6948#ifndef USE_HAL_3_3
6949 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
6950 // Convert the internal 14-bit depth to the sensor's 10-bit raw
6951 // depth space.
6952 fwk_blackLevelInd[0] /= 16.0;
6953 fwk_blackLevelInd[1] /= 16.0;
6954 fwk_blackLevelInd[2] /= 16.0;
6955 fwk_blackLevelInd[3] /= 16.0;
6956 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6957 BLACK_LEVEL_PATTERN_CNT);
6958#endif
6959 }
6960
6961#ifndef USE_HAL_3_3
6962 // Fixed whitelevel is used by ISP/Sensor
6963 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6964 &gCamCapability[mCameraId]->white_level, 1);
6965#endif
6966
6967 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6968 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6969 int32_t scalerCropRegion[4];
6970 scalerCropRegion[0] = hScalerCropRegion->left;
6971 scalerCropRegion[1] = hScalerCropRegion->top;
6972 scalerCropRegion[2] = hScalerCropRegion->width;
6973 scalerCropRegion[3] = hScalerCropRegion->height;
6974
6975 // Adjust crop region from sensor output coordinate system to active
6976 // array coordinate system.
6977 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6978 scalerCropRegion[2], scalerCropRegion[3]);
6979
6980 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6981 }
6982
6983 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6984 LOGD("sensorExpTime = %lld", *sensorExpTime);
6985 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6986 }
6987
6988 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6989 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6990 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6991 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6992 }
6993
6994 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6995 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6996 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6997 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6998 sensorRollingShutterSkew, 1);
6999 }
7000
7001 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7002 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7003 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7004
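 // Note (added comment, assumption about layout): the (S, O) pairs written below
 // follow the ANDROID_SENSOR_NOISE_PROFILE convention of one pair per color
 // channel, where the pixel noise variance is modeled as S * signal + O.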
7005 //calculate the noise profile based on sensitivity
7006 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7007 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7008 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7009 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7010 noise_profile[i] = noise_profile_S;
7011 noise_profile[i+1] = noise_profile_O;
7012 }
7013 LOGD("noise model entry (S, O) is (%f, %f)",
7014 noise_profile_S, noise_profile_O);
7015 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7016 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7017 }
7018
7019#ifndef USE_HAL_3_3
7020 int32_t fwk_ispSensitivity = 100;
7021 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
7022 fwk_ispSensitivity = (int32_t) *ispSensitivity;
7023 }
7024 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7025 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7026 }
7027 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
7028#endif
7029
7030 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7031 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7032 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7033 }
7034
7035 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7036 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7037 *faceDetectMode);
7038 if (NAME_NOT_FOUND != val) {
7039 uint8_t fwk_faceDetectMode = (uint8_t)val;
7040 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7041
7042 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7043 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7044 CAM_INTF_META_FACE_DETECTION, metadata) {
7045 uint8_t numFaces = MIN(
7046 faceDetectionInfo->num_faces_detected, MAX_ROI);
7047 int32_t faceIds[MAX_ROI];
7048 uint8_t faceScores[MAX_ROI];
7049 int32_t faceRectangles[MAX_ROI * 4];
7050 int32_t faceLandmarks[MAX_ROI * 6];
7051 size_t j = 0, k = 0;
7052
7053 for (size_t i = 0; i < numFaces; i++) {
7054 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7055 // Adjust the face boundary from the sensor output coordinate system to the
7056 // active array coordinate system.
7057 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7058 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7059 rect.width, rect.height);
7060
7061 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7062 faceRectangles+j, -1);
7063
7064 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7065 "bottom-right (%d, %d)",
7066 faceDetectionInfo->frame_id, i,
7067 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7068 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7069
7070 j+= 4;
7071 }
7072 if (numFaces <= 0) {
7073 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7074 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7075 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7076 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7077 }
7078
7079 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7080 numFaces);
7081 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7082 faceRectangles, numFaces * 4U);
7083 if (fwk_faceDetectMode ==
7084 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7085 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7086 CAM_INTF_META_FACE_LANDMARK, metadata) {
7087
7088 for (size_t i = 0; i < numFaces; i++) {
7089 // Map the landmark coordinates from the sensor output coordinate system
7090 // to the active array coordinate system.
7091 mCropRegionMapper.toActiveArray(
7092 landmarks->face_landmarks[i].left_eye_center.x,
7093 landmarks->face_landmarks[i].left_eye_center.y);
7094 mCropRegionMapper.toActiveArray(
7095 landmarks->face_landmarks[i].right_eye_center.x,
7096 landmarks->face_landmarks[i].right_eye_center.y);
7097 mCropRegionMapper.toActiveArray(
7098 landmarks->face_landmarks[i].mouth_center.x,
7099 landmarks->face_landmarks[i].mouth_center.y);
7100
7101 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
7102
7103 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7104 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7105 faceDetectionInfo->frame_id, i,
7106 faceLandmarks[k + LEFT_EYE_X],
7107 faceLandmarks[k + LEFT_EYE_Y],
7108 faceLandmarks[k + RIGHT_EYE_X],
7109 faceLandmarks[k + RIGHT_EYE_Y],
7110 faceLandmarks[k + MOUTH_X],
7111 faceLandmarks[k + MOUTH_Y]);
7112
7113 k+= TOTAL_LANDMARK_INDICES;
7114 }
7115 } else {
7116 for (size_t i = 0; i < numFaces; i++) {
7117 setInvalidLandmarks(faceLandmarks+k);
7118 k+= TOTAL_LANDMARK_INDICES;
7119 }
7120 }
7121
7122 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7123 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7124 faceLandmarks, numFaces * 6U);
7125 }
7126 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7127 CAM_INTF_META_FACE_BLINK, metadata) {
7128 uint8_t detected[MAX_ROI];
7129 uint8_t degree[MAX_ROI * 2];
7130 for (size_t i = 0; i < numFaces; i++) {
7131 detected[i] = blinks->blink[i].blink_detected;
7132 degree[2 * i] = blinks->blink[i].left_blink;
7133 degree[2 * i + 1] = blinks->blink[i].right_blink;
7134
7135 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7136 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7137 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7138 degree[2 * i + 1]);
7139 }
7140 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7141 detected, numFaces);
7142 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7143 degree, numFaces * 2);
7144 }
7145 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7146 CAM_INTF_META_FACE_SMILE, metadata) {
7147 uint8_t degree[MAX_ROI];
7148 uint8_t confidence[MAX_ROI];
7149 for (size_t i = 0; i < numFaces; i++) {
7150 degree[i] = smiles->smile[i].smile_degree;
7151 confidence[i] = smiles->smile[i].smile_confidence;
7152
7153 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7154 "smile_degree=%d, smile_score=%d",
7155 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
7156 }
7157 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7158 degree, numFaces);
7159 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7160 confidence, numFaces);
7161 }
7162 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7163 CAM_INTF_META_FACE_GAZE, metadata) {
7164 int8_t angle[MAX_ROI];
7165 int32_t direction[MAX_ROI * 3];
7166 int8_t degree[MAX_ROI * 2];
7167 for (size_t i = 0; i < numFaces; i++) {
7168 angle[i] = gazes->gaze[i].gaze_angle;
7169 direction[3 * i] = gazes->gaze[i].updown_dir;
7170 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7171 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7172 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7173 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
7174
7175 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7176 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7177 "left_right_gaze=%d, top_bottom_gaze=%d",
7178 faceDetectionInfo->frame_id, i, angle[i],
7179 direction[3 * i], direction[3 * i + 1],
7180 direction[3 * i + 2],
7181 degree[2 * i], degree[2 * i + 1]);
7182 }
7183 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7184 (uint8_t *)angle, numFaces);
7185 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7186 direction, numFaces * 3);
7187 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7188 (uint8_t *)degree, numFaces * 2);
7189 }
7190 }
7191 }
7192 }
7193 }
7194
7195 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7196 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
7197 int32_t histogramBins = 0;
7198 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
7199 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
7200
7201 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7202 histogramBins = *histBins;
7203 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7204 }
7205
7206 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
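 // Added comment: for Bayer statistics, the histogram buffer of the channel
 // reported by the stats engine is forwarded (falling back to the R buffer for
 // the remaining cases); for YUV statistics, the luma histogram is used.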
7207 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7208 // process histogram statistics info
7209 int32_t* histogramData = NULL;
7210
7211 switch (stats_data->type) {
7212 case CAM_HISTOGRAM_TYPE_BAYER:
7213 switch (stats_data->bayer_stats.data_type) {
7214 case CAM_STATS_CHANNEL_GR:
7215 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7216 break;
7217 case CAM_STATS_CHANNEL_GB:
7218 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7219 break;
7220 case CAM_STATS_CHANNEL_B:
7221 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7222 break;
7223 case CAM_STATS_CHANNEL_Y:
7224 case CAM_STATS_CHANNEL_ALL:
7225 case CAM_STATS_CHANNEL_R:
7226 default:
7227 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7228 break;
7229 }
7230 break;
7231 case CAM_HISTOGRAM_TYPE_YUV:
7232 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
7233 break;
7234 }
7235
7236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
7237 }
7238 }
7239 }
7240
7241 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7242 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7243 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7244 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7245 }
7246
7247 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7248 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7249 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7250 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7251 }
7252
7253 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7254 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7255 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7256 CAM_MAX_SHADING_MAP_HEIGHT);
7257 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7258 CAM_MAX_SHADING_MAP_WIDTH);
7259 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7260 lensShadingMap->lens_shading, 4U * map_width * map_height);
7261 }
7262
7263 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7264 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7265 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7266 }
7267
7268 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7269 //Populate CAM_INTF_META_TONEMAP_CURVES
7270 /* ch0 = G, ch 1 = B, ch 2 = R*/
7271 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7272 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7273 tonemap->tonemap_points_cnt,
7274 CAM_MAX_TONEMAP_CURVE_SIZE);
7275 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7276 }
7277
7278 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7279 &tonemap->curves[0].tonemap_points[0][0],
7280 tonemap->tonemap_points_cnt * 2);
7281
7282 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7283 &tonemap->curves[1].tonemap_points[0][0],
7284 tonemap->tonemap_points_cnt * 2);
7285
7286 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7287 &tonemap->curves[2].tonemap_points[0][0],
7288 tonemap->tonemap_points_cnt * 2);
7289 }
7290
7291 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7292 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7293 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7294 CC_GAIN_MAX);
7295 }
7296
7297 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7298 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7299 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7300 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7301 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7302 }
7303
7304 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7305 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7306 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7307 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7308 toneCurve->tonemap_points_cnt,
7309 CAM_MAX_TONEMAP_CURVE_SIZE);
7310 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7311 }
7312 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7313 (float*)toneCurve->curve.tonemap_points,
7314 toneCurve->tonemap_points_cnt * 2);
7315 }
7316
7317 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7318 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7319 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7320 predColorCorrectionGains->gains, 4);
7321 }
7322
7323 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7324 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7325 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7326 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7327 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7328 }
7329
7330 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7331 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7332 }
7333
7334 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7335 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7336 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7340 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7341 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7342 }
7343
7344 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7345 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7346 *effectMode);
7347 if (NAME_NOT_FOUND != val) {
7348 uint8_t fwk_effectMode = (uint8_t)val;
7349 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7350 }
7351 }
7352
7353 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7354 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7355 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7356 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7357 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7358 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7359 }
7360 int32_t fwk_testPatternData[4];
7361 fwk_testPatternData[0] = testPatternData->r;
7362 fwk_testPatternData[3] = testPatternData->b;
7363 switch (gCamCapability[mCameraId]->color_arrangement) {
7364 case CAM_FILTER_ARRANGEMENT_RGGB:
7365 case CAM_FILTER_ARRANGEMENT_GRBG:
7366 fwk_testPatternData[1] = testPatternData->gr;
7367 fwk_testPatternData[2] = testPatternData->gb;
7368 break;
7369 case CAM_FILTER_ARRANGEMENT_GBRG:
7370 case CAM_FILTER_ARRANGEMENT_BGGR:
7371 fwk_testPatternData[2] = testPatternData->gr;
7372 fwk_testPatternData[1] = testPatternData->gb;
7373 break;
7374 default:
7375 LOGE("color arrangement %d is not supported",
7376 gCamCapability[mCameraId]->color_arrangement);
7377 break;
7378 }
7379 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7380 }
7381
7382 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7383 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7384 }
7385
7386 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7387 String8 str((const char *)gps_methods);
7388 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7389 }
7390
7391 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7392 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7393 }
7394
7395 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7396 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7397 }
7398
7399 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7400 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7401 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7402 }
7403
7404 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7405 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7406 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7407 }
7408
7409 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7410 int32_t fwk_thumb_size[2];
7411 fwk_thumb_size[0] = thumb_size->width;
7412 fwk_thumb_size[1] = thumb_size->height;
7413 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7414 }
7415
7416 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7417 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7418 privateData,
7419 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7420 }
7421
7422 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
7423 camMetadata.update(QCAMERA3_EXPOSURE_METER,
7424 meteringMode, 1);
7425 }
7426
7427 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7428 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7429 LOGD("hdr_scene_data: %d %f\n",
7430 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7431 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7432 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7433 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7434 &isHdr, 1);
7435 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7436 &isHdrConfidence, 1);
7437 }
7438
7439
7440
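 // Added comment: layout of the tuning blob assembled below is a header of six
 // uint32 fields (data version, then sensor/VFE/CPP/CAC/mod3 section sizes)
 // followed by the sensor, VFE, CPP and CAC payloads copied back to back.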
7441 if (metadata->is_tuning_params_valid) {
7442 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7443 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7444 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7445
7446
7447 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7448 sizeof(uint32_t));
7449 data += sizeof(uint32_t);
7450
7451 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7452 sizeof(uint32_t));
7453 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7454 data += sizeof(uint32_t);
7455
7456 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7457 sizeof(uint32_t));
7458 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7459 data += sizeof(uint32_t);
7460
7461 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7462 sizeof(uint32_t));
7463 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7464 data += sizeof(uint32_t);
7465
7466 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7467 sizeof(uint32_t));
7468 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7469 data += sizeof(uint32_t);
7470
7471 metadata->tuning_params.tuning_mod3_data_size = 0;
7472 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7473 sizeof(uint32_t));
7474 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7475 data += sizeof(uint32_t);
7476
7477 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7478 TUNING_SENSOR_DATA_MAX);
7479 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7480 count);
7481 data += count;
7482
7483 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7484 TUNING_VFE_DATA_MAX);
7485 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7486 count);
7487 data += count;
7488
7489 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7490 TUNING_CPP_DATA_MAX);
7491 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7492 count);
7493 data += count;
7494
7495 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7496 TUNING_CAC_DATA_MAX);
7497 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7498 count);
7499 data += count;
7500
7501 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7502 (int32_t *)(void *)tuning_meta_data_blob,
7503 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7504 }
7505
7506 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7507 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7508 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7509 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7510 NEUTRAL_COL_POINTS);
7511 }
7512
7513 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7514 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7515 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7516 }
7517
7518 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7519 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7520 // Adjust the AE region from the sensor output coordinate system to the
7521 // active array coordinate system.
7522 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7523 hAeRegions->rect.width, hAeRegions->rect.height);
7524
7525 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7526 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7527 REGIONS_TUPLE_COUNT);
7528 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7529 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7530 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7531 hAeRegions->rect.height);
7532 }
7533
7534 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7535 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7536 if (NAME_NOT_FOUND != val) {
7537 uint8_t fwkAfMode = (uint8_t)val;
7538 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7539 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7540 } else {
7541 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7542 val);
7543 }
7544 }
7545
7546 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7547 uint8_t fwk_afState = (uint8_t) *afState;
7548 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7549 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7550 }
7551
7552 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7553 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7554 }
7555
7556 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7557 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7558 }
7559
7560 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7561 uint8_t fwk_lensState = *lensState;
7562 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7563 }
7564
7565
7566 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
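 // Added comment: CAM_ANTIBANDING_MODE_AUTO_50HZ/AUTO_60HZ are HAL-internal
 // refinements of AUTO with no framework equivalent, so they are collapsed to
 // plain AUTO before the lookup below.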
7567 uint32_t ab_mode = *hal_ab_mode;
7568 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7569 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7570 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7571 }
7572 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7573 ab_mode);
7574 if (NAME_NOT_FOUND != val) {
7575 uint8_t fwk_ab_mode = (uint8_t)val;
7576 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7577 }
7578 }
7579
7580 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7581 int val = lookupFwkName(SCENE_MODES_MAP,
7582 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7583 if (NAME_NOT_FOUND != val) {
7584 uint8_t fwkBestshotMode = (uint8_t)val;
7585 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7586 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7587 } else {
7588 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7589 }
7590 }
7591
7592 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7593 uint8_t fwk_mode = (uint8_t) *mode;
7594 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7595 }
7596
7597 /* Constant metadata values to be updated */
7598 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7599 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7600
7601 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7602 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7603
7604 int32_t hotPixelMap[2];
7605 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7606
7607 // CDS
7608 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7609 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7610 }
7611
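 // Added comment: the video HDR / IR / TNR blocks below also mirror the feature
 // state into mCurrFeatureState so that a PROFILE_META_*_TOGGLED message is
 // logged only when the corresponding feature actually changes between frames.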
7612 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7613 int32_t fwk_hdr;
7614 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
7615 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7616 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7617 } else {
7618 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7619 }
7620
7621 if(fwk_hdr != curr_hdr_state) {
7622 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7623 if(fwk_hdr)
7624 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7625 else
7626 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7627 }
7628 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7629 }
7630
7631 // Binning correction
7632 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7633 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7634 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7635 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7636 }
7637
7638 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
7639 int32_t fwk_ir = (int32_t) *ir;
7640 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
7641 int8_t is_ir_on = 0;
7642
7643 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7644 if(is_ir_on != curr_ir_state) {
7645 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7646 if(is_ir_on)
7647 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7648 else
7649 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7650 }
7651 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
7652 }
7653
7654 // AEC SPEED
7655 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7656 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7657 }
7658
7659 // AWB SPEED
7660 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7661 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7662 }
7663
7664 // TNR
7665 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7666 uint8_t tnr_enable = tnr->denoise_enable;
7667 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7668 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
7669 int8_t is_tnr_on = 0;
7670
7671 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7672 if(is_tnr_on != curr_tnr_state) {
7673 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7674 if(is_tnr_on)
7675 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7676 else
7677 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7678 }
7679
7680 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7681 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7682 }
7683
7684 // Reprocess crop data
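 // Added comment (intent is an assumption): the crop applied to the
 // reprocessible output stream (or the full input dimensions when the HAL has
 // already done internal reprocessing) is forwarded, presumably so the offline
 // reprocess path can map ROIs back to the processed frame.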
7685 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7686 uint8_t cnt = crop_data->num_of_streams;
7687 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7688 // mm-qcamera-daemon only posts crop_data for streams
7689 // not linked to pproc. So the absence of valid crop metadata is not
7690 // necessarily an error case.
7691 LOGD("No valid crop metadata entries");
7692 } else {
7693 uint32_t reproc_stream_id;
7694 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7695 LOGD("No reprocessible stream found, ignore crop data");
7696 } else {
7697 int rc = NO_ERROR;
7698 Vector<int32_t> roi_map;
7699 int32_t *crop = new int32_t[cnt*4];
7700 if (NULL == crop) {
7701 rc = NO_MEMORY;
7702 }
7703 if (NO_ERROR == rc) {
7704 int32_t streams_found = 0;
7705 for (size_t i = 0; i < cnt; i++) {
7706 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7707 if (pprocDone) {
7708 // HAL already does internal reprocessing,
7709 // either via reprocessing before JPEG encoding,
7710 // or offline postprocessing for pproc bypass case.
7711 crop[0] = 0;
7712 crop[1] = 0;
7713 crop[2] = mInputStreamInfo.dim.width;
7714 crop[3] = mInputStreamInfo.dim.height;
7715 } else {
7716 crop[0] = crop_data->crop_info[i].crop.left;
7717 crop[1] = crop_data->crop_info[i].crop.top;
7718 crop[2] = crop_data->crop_info[i].crop.width;
7719 crop[3] = crop_data->crop_info[i].crop.height;
7720 }
7721 roi_map.add(crop_data->crop_info[i].roi_map.left);
7722 roi_map.add(crop_data->crop_info[i].roi_map.top);
7723 roi_map.add(crop_data->crop_info[i].roi_map.width);
7724 roi_map.add(crop_data->crop_info[i].roi_map.height);
7725 streams_found++;
7726 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7727 crop[0], crop[1], crop[2], crop[3]);
7728 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7729 crop_data->crop_info[i].roi_map.left,
7730 crop_data->crop_info[i].roi_map.top,
7731 crop_data->crop_info[i].roi_map.width,
7732 crop_data->crop_info[i].roi_map.height);
7733 break;
7734
7735 }
7736 }
7737 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7738 &streams_found, 1);
7739 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7740 crop, (size_t)(streams_found * 4));
7741 if (roi_map.array()) {
7742 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7743 roi_map.array(), roi_map.size());
7744 }
7745 }
7746 if (crop) {
7747 delete [] crop;
7748 }
7749 }
7750 }
7751 }
7752
7753 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7754 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7755 // so hardcode the CAC result to OFF mode.
7756 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7757 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7758 } else {
7759 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7760 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7761 *cacMode);
7762 if (NAME_NOT_FOUND != val) {
7763 uint8_t resultCacMode = (uint8_t)val;
7764 // Check whether the CAC result from the callback equals the framework-set CAC mode.
7765 // If not, report the CAC mode that came in the corresponding request.
7766 if (fwk_cacMode != resultCacMode) {
7767 resultCacMode = fwk_cacMode;
7768 }
7769 //Check if CAC is disabled by property
7770 if (m_cacModeDisabled) {
7771 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7772 }
7773
7774 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7775 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7776 } else {
7777 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7778 }
7779 }
7780 }
7781
7782 // Post blob of cam_cds_data through vendor tag.
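 // Added comment: only the CDS flag of the reprocessible output stream is
 // forwarded; the override blob below always advertises exactly one stream entry.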
7783 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7784 uint8_t cnt = cdsInfo->num_of_streams;
7785 cam_cds_data_t cdsDataOverride;
7786 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7787 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7788 cdsDataOverride.num_of_streams = 1;
7789 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7790 uint32_t reproc_stream_id;
7791 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7792 LOGD("No reprocessible stream found, ignore cds data");
7793 } else {
7794 for (size_t i = 0; i < cnt; i++) {
7795 if (cdsInfo->cds_info[i].stream_id ==
7796 reproc_stream_id) {
7797 cdsDataOverride.cds_info[0].cds_enable =
7798 cdsInfo->cds_info[i].cds_enable;
7799 break;
7800 }
7801 }
7802 }
7803 } else {
7804 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7805 }
7806 camMetadata.update(QCAMERA3_CDS_INFO,
7807 (uint8_t *)&cdsDataOverride,
7808 sizeof(cam_cds_data_t));
7809 }
7810
7811 // Ldaf calibration data
7812 if (!mLdafCalibExist) {
7813 IF_META_AVAILABLE(uint32_t, ldafCalib,
7814 CAM_INTF_META_LDAF_EXIF, metadata) {
7815 mLdafCalibExist = true;
7816 mLdafCalib[0] = ldafCalib[0];
7817 mLdafCalib[1] = ldafCalib[1];
7818 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7819 ldafCalib[0], ldafCalib[1]);
7820 }
7821 }
7822
7823 // EXIF debug data through vendor tag
7824 /*
7825 * Mobicat Mask can assume 3 values:
7826 * 1 refers to Mobicat data,
7827 * 2 refers to Stats Debug and Exif Debug Data
7828 * 3 refers to Mobicat and Stats Debug Data
7829 * We want to make sure that we are sending Exif debug data
7830 * only when Mobicat Mask is 2.
7831 */
7832 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7833 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7834 (uint8_t *)(void *)mExifParams.debug_params,
7835 sizeof(mm_jpeg_debug_exif_params_t));
7836 }
7837
7838 // Reprocess and DDM debug data through vendor tag
7839 cam_reprocess_info_t repro_info;
7840 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
7841 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7842 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
7843 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
7844 }
7845 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7846 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
7847 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
7848 }
7849 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7850 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
7851 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
7852 }
7853 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7854 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
7855 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
7856 }
7857 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7858 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
7859 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
7860 }
7861 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
7862 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
7863 }
7864 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7865 CAM_INTF_PARM_ROTATION, metadata) {
7866 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
7867 }
7868 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7869 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7870 }
7871 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7872 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7873 }
7874 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7875 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
7876
7877 // INSTANT AEC MODE
7878 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7879 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7880 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7881 }
7882
7883 // AF scene change
7884 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7885 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7886 }
7887
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007888 // Enable ZSL
7889 if (enableZsl != nullptr) {
7890 uint8_t value = *enableZsl ?
7891 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7892 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7893 }
7894
Thierry Strudel3d639192016-09-09 11:52:26 -07007895 resultMetadata = camMetadata.release();
7896 return resultMetadata;
7897}
7898
7899/*===========================================================================
7900 * FUNCTION : saveExifParams
7901 *
7902 * DESCRIPTION: Cache the 3A/stats Exif debug parameters delivered in the
7902 *              metadata callback into mExifParams
7903 *
7904 * PARAMETERS :
7905 * @metadata : metadata information from callback
7906 *
7907 * RETURN : none
7908 *
7909 *==========================================================================*/
7910void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7911{
7912 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7913 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7914 if (mExifParams.debug_params) {
7915 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7916 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7917 }
7918 }
7919 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7920 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7921 if (mExifParams.debug_params) {
7922 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7923 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7924 }
7925 }
7926 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7927 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7928 if (mExifParams.debug_params) {
7929 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7930 mExifParams.debug_params->af_debug_params_valid = TRUE;
7931 }
7932 }
7933 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7934 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7935 if (mExifParams.debug_params) {
7936 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7937 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7938 }
7939 }
7940 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7941 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7942 if (mExifParams.debug_params) {
7943 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7944 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7945 }
7946 }
7947 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7948 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7949 if (mExifParams.debug_params) {
7950 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7951 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7952 }
7953 }
7954 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7955 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7956 if (mExifParams.debug_params) {
7957 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7958 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7959 }
7960 }
7961 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7962 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7963 if (mExifParams.debug_params) {
7964 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7965 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7966 }
7967 }
7968}
7969
7970/*===========================================================================
7971 * FUNCTION : get3AExifParams
7972 *
7973 * DESCRIPTION: Return the cached Exif parameters, including the 3A debug
7973 *              data saved by saveExifParams()
7974 *
7975 * PARAMETERS : none
7976 *
7977 *
7978 * RETURN : mm_jpeg_exif_params_t
7979 *
7980 *==========================================================================*/
7981mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7982{
7983 return mExifParams;
7984}
7985
7986/*===========================================================================
7987 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7988 *
7989 * DESCRIPTION: Translate urgent (partial result) metadata from the backend
7989 *              into framework result metadata
7990 *
7991 * PARAMETERS :
7992 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07007993 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
7994 * urgent metadata in a batch. Always true for
7995 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07007996 *
7997 * RETURN : camera_metadata_t*
7998 * metadata in a format specified by fwk
7999 *==========================================================================*/
8000camera_metadata_t*
8001QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008002 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008003{
8004 CameraMetadata camMetadata;
8005 camera_metadata_t *resultMetadata;
8006
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008007 if (!lastUrgentMetadataInBatch) {
8008 /* In batch mode, use empty metadata if this is not the last in batch
8009 */
8010 resultMetadata = allocate_camera_metadata(0, 0);
8011 return resultMetadata;
8012 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008013
8014 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8015 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8016 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8017 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8018 }
8019
8020 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8021 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8022 &aecTrigger->trigger, 1);
8023 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8024 &aecTrigger->trigger_id, 1);
8025 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8026 aecTrigger->trigger);
8027 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8028 aecTrigger->trigger_id);
8029 }
8030
8031 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8032 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8033 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8034 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8035 }
8036
Thierry Strudel3d639192016-09-09 11:52:26 -07008037 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8038 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8039 &af_trigger->trigger, 1);
8040 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8041 af_trigger->trigger);
8042 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8043 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8044 af_trigger->trigger_id);
8045 }
8046
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008047 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8048 /*af regions*/
8049 int32_t afRegions[REGIONS_TUPLE_COUNT];
8050        // Adjust AF region from sensor output coordinate system to active
8051        // array coordinate system.
8052 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8053 hAfRegions->rect.width, hAfRegions->rect.height);
8054
8055 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8056 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8057 REGIONS_TUPLE_COUNT);
8058 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8059 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8060 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8061 hAfRegions->rect.height);
8062 }
8063
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008064 // AF region confidence
8065 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8066 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8067 }
8068
Thierry Strudel3d639192016-09-09 11:52:26 -07008069 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8070 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8071 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8072 if (NAME_NOT_FOUND != val) {
8073 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8074 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8075 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8076 } else {
8077 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8078 }
8079 }
8080
8081 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8082 uint32_t aeMode = CAM_AE_MODE_MAX;
8083 int32_t flashMode = CAM_FLASH_MODE_MAX;
8084 int32_t redeye = -1;
8085 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8086 aeMode = *pAeMode;
8087 }
8088 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8089 flashMode = *pFlashMode;
8090 }
8091 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8092 redeye = *pRedeye;
8093 }
8094
8095 if (1 == redeye) {
8096 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8097 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8098 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8099 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8100 flashMode);
8101 if (NAME_NOT_FOUND != val) {
8102 fwk_aeMode = (uint8_t)val;
8103 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8104 } else {
8105 LOGE("Unsupported flash mode %d", flashMode);
8106 }
8107 } else if (aeMode == CAM_AE_MODE_ON) {
8108 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8109 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8110 } else if (aeMode == CAM_AE_MODE_OFF) {
8111 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8112 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008113 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8114 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8115 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008116 } else {
8117 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8118 "flashMode:%d, aeMode:%u!!!",
8119 redeye, flashMode, aeMode);
8120 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008121 if (mInstantAEC) {
8122        // Increment frame Idx count until a bound is reached for instant AEC.
8123 mInstantAecFrameIdxCount++;
8124 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8125 CAM_INTF_META_AEC_INFO, metadata) {
8126 LOGH("ae_params->settled = %d",ae_params->settled);
8127            // If AEC has settled, or the number of frames has reached the bound value,
8128            // reset instant AEC.
8129 if (ae_params->settled ||
8130 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8131 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8132 mInstantAEC = false;
8133 mResetInstantAEC = true;
8134 mInstantAecFrameIdxCount = 0;
8135 }
8136 }
8137 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008138 resultMetadata = camMetadata.release();
8139 return resultMetadata;
8140}
8141
8142/*===========================================================================
8143 * FUNCTION : dumpMetadataToFile
8144 *
8145 * DESCRIPTION: Dumps tuning metadata to file system
8146 *
8147 * PARAMETERS :
8148 * @meta : tuning metadata
8149 * @dumpFrameCount : current dump frame count
8150 * @enabled : whether dumping is enabled; @type and @frameNumber select the dump file name
8151 *
8152 *==========================================================================*/
8153void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8154 uint32_t &dumpFrameCount,
8155 bool enabled,
8156 const char *type,
8157 uint32_t frameNumber)
8158{
8159 //Some sanity checks
8160 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8161 LOGE("Tuning sensor data size bigger than expected %d: %d",
8162 meta.tuning_sensor_data_size,
8163 TUNING_SENSOR_DATA_MAX);
8164 return;
8165 }
8166
8167 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8168 LOGE("Tuning VFE data size bigger than expected %d: %d",
8169 meta.tuning_vfe_data_size,
8170 TUNING_VFE_DATA_MAX);
8171 return;
8172 }
8173
8174 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8175 LOGE("Tuning CPP data size bigger than expected %d: %d",
8176 meta.tuning_cpp_data_size,
8177 TUNING_CPP_DATA_MAX);
8178 return;
8179 }
8180
8181 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8182 LOGE("Tuning CAC data size bigger than expected %d: %d",
8183 meta.tuning_cac_data_size,
8184 TUNING_CAC_DATA_MAX);
8185 return;
8186 }
8187 //
8188
8189 if(enabled){
8190 char timeBuf[FILENAME_MAX];
8191 char buf[FILENAME_MAX];
8192 memset(buf, 0, sizeof(buf));
8193 memset(timeBuf, 0, sizeof(timeBuf));
8194 time_t current_time;
8195 struct tm * timeinfo;
8196 time (&current_time);
8197 timeinfo = localtime (&current_time);
8198 if (timeinfo != NULL) {
8199 strftime (timeBuf, sizeof(timeBuf),
8200 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8201 }
8202 String8 filePath(timeBuf);
8203 snprintf(buf,
8204 sizeof(buf),
8205 "%dm_%s_%d.bin",
8206 dumpFrameCount,
8207 type,
8208 frameNumber);
8209 filePath.append(buf);
8210 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8211 if (file_fd >= 0) {
8212 ssize_t written_len = 0;
8213 meta.tuning_data_version = TUNING_DATA_VERSION;
8214 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8215 written_len += write(file_fd, data, sizeof(uint32_t));
8216 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8217 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8218 written_len += write(file_fd, data, sizeof(uint32_t));
8219 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8220 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8221 written_len += write(file_fd, data, sizeof(uint32_t));
8222 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8223 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8224 written_len += write(file_fd, data, sizeof(uint32_t));
8225 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8226 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8227 written_len += write(file_fd, data, sizeof(uint32_t));
8228 meta.tuning_mod3_data_size = 0;
8229 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8230 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8231 written_len += write(file_fd, data, sizeof(uint32_t));
8232 size_t total_size = meta.tuning_sensor_data_size;
8233 data = (void *)((uint8_t *)&meta.data);
8234 written_len += write(file_fd, data, total_size);
8235 total_size = meta.tuning_vfe_data_size;
8236 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8237 written_len += write(file_fd, data, total_size);
8238 total_size = meta.tuning_cpp_data_size;
8239 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8240 written_len += write(file_fd, data, total_size);
8241 total_size = meta.tuning_cac_data_size;
8242 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8243 written_len += write(file_fd, data, total_size);
8244 close(file_fd);
8245        } else {
8246 LOGE("fail to open file for metadata dumping");
8247 }
8248 }
8249}
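/* Illustrative only: a minimal sketch of how a host-side tool could read back
 * the header of the tuning dump written above. The layout (six uint32_t header
 * fields in write order, followed by the sensor/VFE/CPP/CAC payloads) mirrors
 * dumpMetadataToFile(); the reader function name and its plain POSIX I/O are
 * assumptions for illustration, not part of this HAL.
 *
 *   static bool readTuningHeader(const char *path, uint32_t hdr[6]) {
 *       int fd = open(path, O_RDONLY);
 *       if (fd < 0) return false;
 *       // hdr[0]=version, hdr[1]=sensor, hdr[2]=vfe, hdr[3]=cpp,
 *       // hdr[4]=cac, hdr[5]=mod3 data sizes, in the order written above.
 *       bool ok = (read(fd, hdr, 6 * sizeof(uint32_t)) ==
 *               (ssize_t)(6 * sizeof(uint32_t)));
 *       close(fd);
 *       return ok;
 *   }
 */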
8250
8251/*===========================================================================
8252 * FUNCTION : cleanAndSortStreamInfo
8253 *
8254 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8255 * and sort them such that the raw stream is at the end of the list.
8256 * This is a workaround for a camera daemon constraint.
8257 *
8258 * PARAMETERS : None
8259 *
8260 *==========================================================================*/
8261void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8262{
8263 List<stream_info_t *> newStreamInfo;
8264
8265 /*clean up invalid streams*/
8266 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8267 it != mStreamInfo.end();) {
8268 if(((*it)->status) == INVALID){
8269 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8270 delete channel;
8271 free(*it);
8272 it = mStreamInfo.erase(it);
8273 } else {
8274 it++;
8275 }
8276 }
8277
8278 // Move preview/video/callback/snapshot streams into newList
8279 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8280 it != mStreamInfo.end();) {
8281 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8282 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8283 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8284 newStreamInfo.push_back(*it);
8285 it = mStreamInfo.erase(it);
8286 } else
8287 it++;
8288 }
8289 // Move raw streams into newList
8290 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8291 it != mStreamInfo.end();) {
8292 newStreamInfo.push_back(*it);
8293 it = mStreamInfo.erase(it);
8294 }
8295
8296 mStreamInfo = newStreamInfo;
8297}
8298
8299/*===========================================================================
8300 * FUNCTION : extractJpegMetadata
8301 *
8302 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8303 * JPEG metadata is cached in HAL, and returned as part of the capture
8304 * result when metadata is returned from camera daemon.
8305 *
8306 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8307 * @request: capture request
8308 *
8309 *==========================================================================*/
8310void QCamera3HardwareInterface::extractJpegMetadata(
8311 CameraMetadata& jpegMetadata,
8312 const camera3_capture_request_t *request)
8313{
8314 CameraMetadata frame_settings;
8315 frame_settings = request->settings;
8316
8317 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8318 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8319 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8320 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8321
8322 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8323 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8324 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8325 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8326
8327 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8328 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8329 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8330 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8331
8332 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8333 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8334 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8335 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8336
8337 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8338 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8339 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8340 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8341
8342 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8343 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8344 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8345 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8346
8347 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8348 int32_t thumbnail_size[2];
8349 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8350 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8351 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8352 int32_t orientation =
8353 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008354 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008355 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8356 int32_t temp;
8357 temp = thumbnail_size[0];
8358 thumbnail_size[0] = thumbnail_size[1];
8359 thumbnail_size[1] = temp;
8360 }
8361 }
8362 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8363 thumbnail_size,
8364 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8365 }
8366
8367}
8368
8369/*===========================================================================
8370 * FUNCTION : convertToRegions
8371 *
8372 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8373 *
8374 * PARAMETERS :
8375 * @rect : cam_rect_t struct to convert
8376 * @region : int32_t destination array
8377 * @weight : if we are converting from cam_area_t, weight is valid
8378 * else weight = -1
8379 *
8380 *==========================================================================*/
8381void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8382 int32_t *region, int weight)
8383{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008384 region[FACE_LEFT] = rect.left;
8385 region[FACE_TOP] = rect.top;
8386 region[FACE_RIGHT] = rect.left + rect.width;
8387 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008388 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008389 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008390 }
8391}
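/* Worked example (illustrative only, hypothetical values): for a cam_rect_t
 * with left=100, top=200, width=300, height=400 and weight=1, convertToRegions()
 * fills the destination array as {100, 200, 400, 600, 1}, i.e. the
 * (left, top, right, bottom, weight) tuple expected by region tags such as
 * ANDROID_CONTROL_AF_REGIONS.
 *
 *   cam_rect_t r;
 *   r.left = 100; r.top = 200; r.width = 300; r.height = 400;
 *   int32_t region[REGIONS_TUPLE_COUNT];
 *   convertToRegions(r, region, 1);   // region = {100, 200, 400, 600, 1}
 */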
8392
8393/*===========================================================================
8394 * FUNCTION : convertFromRegions
8395 *
8396 * DESCRIPTION: helper method to convert a metadata region tag into cam_area_t
8397 *
8398 * PARAMETERS :
8399 *   @roi            : cam_area_t destination to fill
8400 *   @frame_settings : capture request settings containing the region tag
8401 *   @tag            : metadata tag whose data is laid out as
8402 *                     [x_min, y_min, x_max, y_max, weight]
8403 *
8404 *==========================================================================*/
8405void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008406 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008407{
Thierry Strudel3d639192016-09-09 11:52:26 -07008408 int32_t x_min = frame_settings.find(tag).data.i32[0];
8409 int32_t y_min = frame_settings.find(tag).data.i32[1];
8410 int32_t x_max = frame_settings.find(tag).data.i32[2];
8411 int32_t y_max = frame_settings.find(tag).data.i32[3];
8412 roi.weight = frame_settings.find(tag).data.i32[4];
8413 roi.rect.left = x_min;
8414 roi.rect.top = y_min;
8415 roi.rect.width = x_max - x_min;
8416 roi.rect.height = y_max - y_min;
8417}
8418
8419/*===========================================================================
8420 * FUNCTION : resetIfNeededROI
8421 *
8422 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8423 * crop region
8424 *
8425 * PARAMETERS :
8426 * @roi : cam_area_t struct to resize
8427 * @scalerCropRegion : cam_crop_region_t region to compare against
8428 *
8429 *
8430 *==========================================================================*/
8431bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8432 const cam_crop_region_t* scalerCropRegion)
8433{
8434 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8435 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8436 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8437 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8438
8439    /* According to the spec, weight = 0 indicates the roi should be disabled.
8440     * Without this check, the validation below (whether the roi lies inside the
8441     * scalar crop region) would fail, so the roi would never be reset and the
8442     * algorithm would keep using a stale roi window.
8443     */
8444 if (roi->weight == 0) {
8445 return true;
8446 }
8447
8448 if ((roi_x_max < scalerCropRegion->left) ||
8449 // right edge of roi window is left of scalar crop's left edge
8450 (roi_y_max < scalerCropRegion->top) ||
8451 // bottom edge of roi window is above scalar crop's top edge
8452 (roi->rect.left > crop_x_max) ||
8453 // left edge of roi window is beyond(right) of scalar crop's right edge
8454 (roi->rect.top > crop_y_max)){
8455        // top edge of roi window is below scalar crop's bottom edge
8456 return false;
8457 }
8458 if (roi->rect.left < scalerCropRegion->left) {
8459 roi->rect.left = scalerCropRegion->left;
8460 }
8461 if (roi->rect.top < scalerCropRegion->top) {
8462 roi->rect.top = scalerCropRegion->top;
8463 }
8464 if (roi_x_max > crop_x_max) {
8465 roi_x_max = crop_x_max;
8466 }
8467 if (roi_y_max > crop_y_max) {
8468 roi_y_max = crop_y_max;
8469 }
8470 roi->rect.width = roi_x_max - roi->rect.left;
8471 roi->rect.height = roi_y_max - roi->rect.top;
8472 return true;
8473}
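/* Worked example (illustrative only, hypothetical numbers): with a scaler crop
 * region of {left=0, top=0, width=2000, height=1500} and an ROI of
 * {left=1800, top=100, width=500, height=300, weight=1}, the ROI overlaps the
 * crop region, so resetIfNeededROI() clamps its width to 2000 - 1800 = 200
 * (height stays 300) and returns true. An ROI falling completely outside the
 * crop region returns false; weight == 0 returns true without any clamping.
 */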
8474
8475/*===========================================================================
8476 * FUNCTION : convertLandmarks
8477 *
8478 * DESCRIPTION: helper method to extract the landmarks from face detection info
8479 *
8480 * PARAMETERS :
8481 * @landmark_data : input landmark data to be converted
8482 * @landmarks : int32_t destination array
8483 *
8484 *
8485 *==========================================================================*/
8486void QCamera3HardwareInterface::convertLandmarks(
8487 cam_face_landmarks_info_t landmark_data,
8488 int32_t *landmarks)
8489{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008490 if (landmark_data.is_left_eye_valid) {
8491 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8492 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8493 } else {
8494 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8495 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8496 }
8497
8498 if (landmark_data.is_right_eye_valid) {
8499 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8500 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8501 } else {
8502 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8503 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8504 }
8505
8506 if (landmark_data.is_mouth_valid) {
8507 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8508 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8509 } else {
8510 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8511 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8512 }
8513}
8514
8515/*===========================================================================
8516 * FUNCTION : setInvalidLandmarks
8517 *
8518 * DESCRIPTION: helper method to set invalid landmarks
8519 *
8520 * PARAMETERS :
8521 * @landmarks : int32_t destination array
8522 *
8523 *
8524 *==========================================================================*/
8525void QCamera3HardwareInterface::setInvalidLandmarks(
8526 int32_t *landmarks)
8527{
8528 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8529 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8530 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8531 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8532 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8533 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008534}
8535
8536#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008537
8538/*===========================================================================
8539 * FUNCTION : getCapabilities
8540 *
8541 * DESCRIPTION: query camera capability from back-end
8542 *
8543 * PARAMETERS :
8544 * @ops : mm-interface ops structure
8545 * @cam_handle : camera handle for which we need capability
8546 *
8547 * RETURN : ptr type of capability structure
8548 * capability for success
8549 * NULL for failure
8550 *==========================================================================*/
8551cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8552 uint32_t cam_handle)
8553{
8554 int rc = NO_ERROR;
8555 QCamera3HeapMemory *capabilityHeap = NULL;
8556 cam_capability_t *cap_ptr = NULL;
8557
8558 if (ops == NULL) {
8559 LOGE("Invalid arguments");
8560 return NULL;
8561 }
8562
8563 capabilityHeap = new QCamera3HeapMemory(1);
8564 if (capabilityHeap == NULL) {
8565 LOGE("creation of capabilityHeap failed");
8566 return NULL;
8567 }
8568
8569 /* Allocate memory for capability buffer */
8570 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8571 if(rc != OK) {
8572        LOGE("No memory for capability");
8573 goto allocate_failed;
8574 }
8575
8576 /* Map memory for capability buffer */
8577 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8578
8579 rc = ops->map_buf(cam_handle,
8580 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8581 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8582 if(rc < 0) {
8583 LOGE("failed to map capability buffer");
8584 rc = FAILED_TRANSACTION;
8585 goto map_failed;
8586 }
8587
8588 /* Query Capability */
8589 rc = ops->query_capability(cam_handle);
8590 if(rc < 0) {
8591 LOGE("failed to query capability");
8592 rc = FAILED_TRANSACTION;
8593 goto query_failed;
8594 }
8595
8596 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8597 if (cap_ptr == NULL) {
8598 LOGE("out of memory");
8599 rc = NO_MEMORY;
8600 goto query_failed;
8601 }
8602
8603 memset(cap_ptr, 0, sizeof(cam_capability_t));
8604 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8605
8606 int index;
8607 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8608 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8609 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8610 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8611 }
8612
8613query_failed:
8614 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8615map_failed:
8616 capabilityHeap->deallocate();
8617allocate_failed:
8618 delete capabilityHeap;
8619
8620 if (rc != NO_ERROR) {
8621 return NULL;
8622 } else {
8623 return cap_ptr;
8624 }
8625}
8626
Thierry Strudel3d639192016-09-09 11:52:26 -07008627/*===========================================================================
8628 * FUNCTION : initCapabilities
8629 *
8630 * DESCRIPTION: initialize camera capabilities in static data struct
8631 *
8632 * PARAMETERS :
8633 * @cameraId : camera Id
8634 *
8635 * RETURN : int32_t type of status
8636 * NO_ERROR -- success
8637 *              non-zero failure code
8638 *==========================================================================*/
8639int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8640{
8641 int rc = 0;
8642 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008643 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008644
8645 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8646 if (rc) {
8647 LOGE("camera_open failed. rc = %d", rc);
8648 goto open_failed;
8649 }
8650 if (!cameraHandle) {
8651 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8652 goto open_failed;
8653 }
8654
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008655 handle = get_main_camera_handle(cameraHandle->camera_handle);
8656 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8657 if (gCamCapability[cameraId] == NULL) {
8658 rc = FAILED_TRANSACTION;
8659 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008660 }
8661
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008662 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008663 if (is_dual_camera_by_idx(cameraId)) {
8664 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8665 gCamCapability[cameraId]->aux_cam_cap =
8666 getCapabilities(cameraHandle->ops, handle);
8667 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8668 rc = FAILED_TRANSACTION;
8669 free(gCamCapability[cameraId]);
8670 goto failed_op;
8671 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008672
8673 // Copy the main camera capability to main_cam_cap struct
8674 gCamCapability[cameraId]->main_cam_cap =
8675 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8676 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8677 LOGE("out of memory");
8678 rc = NO_MEMORY;
8679 goto failed_op;
8680 }
8681 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8682 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008683 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008684failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008685 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8686 cameraHandle = NULL;
8687open_failed:
8688 return rc;
8689}
8690
8691/*==========================================================================
8692 * FUNCTION   : get3AVersion
8693 *
8694 * DESCRIPTION: get the Q3A S/W version
8695 *
8696 * PARAMETERS :
8697 * @sw_version: Reference of Q3A structure which will hold version info upon
8698 * return
8699 *
8700 * RETURN : None
8701 *
8702 *==========================================================================*/
8703void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8704{
8705 if(gCamCapability[mCameraId])
8706 sw_version = gCamCapability[mCameraId]->q3a_version;
8707 else
8708 LOGE("Capability structure NULL!");
8709}
8710
8711
8712/*===========================================================================
8713 * FUNCTION : initParameters
8714 *
8715 * DESCRIPTION: initialize camera parameters
8716 *
8717 * PARAMETERS :
8718 *
8719 * RETURN : int32_t type of status
8720 * NO_ERROR -- success
8721 *              non-zero failure code
8722 *==========================================================================*/
8723int QCamera3HardwareInterface::initParameters()
8724{
8725 int rc = 0;
8726
8727 //Allocate Set Param Buffer
8728 mParamHeap = new QCamera3HeapMemory(1);
8729 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8730 if(rc != OK) {
8731 rc = NO_MEMORY;
8732 LOGE("Failed to allocate SETPARM Heap memory");
8733 delete mParamHeap;
8734 mParamHeap = NULL;
8735 return rc;
8736 }
8737
8738 //Map memory for parameters buffer
8739 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8740 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8741 mParamHeap->getFd(0),
8742 sizeof(metadata_buffer_t),
8743 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8744 if(rc < 0) {
8745 LOGE("failed to map SETPARM buffer");
8746 rc = FAILED_TRANSACTION;
8747 mParamHeap->deallocate();
8748 delete mParamHeap;
8749 mParamHeap = NULL;
8750 return rc;
8751 }
8752
8753 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8754
8755 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8756 return rc;
8757}
8758
8759/*===========================================================================
8760 * FUNCTION : deinitParameters
8761 *
8762 * DESCRIPTION: de-initialize camera parameters
8763 *
8764 * PARAMETERS :
8765 *
8766 * RETURN : NONE
8767 *==========================================================================*/
8768void QCamera3HardwareInterface::deinitParameters()
8769{
8770 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8771 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8772
8773 mParamHeap->deallocate();
8774 delete mParamHeap;
8775 mParamHeap = NULL;
8776
8777 mParameters = NULL;
8778
8779 free(mPrevParameters);
8780 mPrevParameters = NULL;
8781}
8782
8783/*===========================================================================
8784 * FUNCTION : calcMaxJpegSize
8785 *
8786 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8787 *
8788 * PARAMETERS :
8789 *
8790 * RETURN : max_jpeg_size
8791 *==========================================================================*/
8792size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8793{
8794 size_t max_jpeg_size = 0;
8795 size_t temp_width, temp_height;
8796 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8797 MAX_SIZES_CNT);
8798 for (size_t i = 0; i < count; i++) {
8799 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8800 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8801 if (temp_width * temp_height > max_jpeg_size ) {
8802 max_jpeg_size = temp_width * temp_height;
8803 }
8804 }
8805 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8806 return max_jpeg_size;
8807}
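/* Worked example (illustrative only): for a hypothetical sensor whose largest
 * picture size is 4000x3000, calcMaxJpegSize() returns
 * 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t), i.e. 18000000 bytes plus
 * the blob header -- roughly the worst-case YUV420 footprint used to size the
 * JPEG output buffer.
 */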
8808
8809/*===========================================================================
8810 * FUNCTION : getMaxRawSize
8811 *
8812 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8813 *
8814 * PARAMETERS :
8815 *
8816 * RETURN : Largest supported Raw Dimension
8817 *==========================================================================*/
8818cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8819{
8820 int max_width = 0;
8821 cam_dimension_t maxRawSize;
8822
8823 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8824 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8825 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8826 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8827 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8828 }
8829 }
8830 return maxRawSize;
8831}
8832
8833
8834/*===========================================================================
8835 * FUNCTION : calcMaxJpegDim
8836 *
8837 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8838 *
8839 * PARAMETERS :
8840 *
8841 * RETURN : max_jpeg_dim
8842 *==========================================================================*/
8843cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8844{
8845 cam_dimension_t max_jpeg_dim;
8846 cam_dimension_t curr_jpeg_dim;
8847 max_jpeg_dim.width = 0;
8848 max_jpeg_dim.height = 0;
8849 curr_jpeg_dim.width = 0;
8850 curr_jpeg_dim.height = 0;
8851 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8852 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8853 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8854 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8855 max_jpeg_dim.width * max_jpeg_dim.height ) {
8856 max_jpeg_dim.width = curr_jpeg_dim.width;
8857 max_jpeg_dim.height = curr_jpeg_dim.height;
8858 }
8859 }
8860 return max_jpeg_dim;
8861}
8862
8863/*===========================================================================
8864 * FUNCTION : addStreamConfig
8865 *
8866 * DESCRIPTION: adds the stream configuration to the array
8867 *
8868 * PARAMETERS :
8869 * @available_stream_configs : pointer to stream configuration array
8870 * @scalar_format : scalar format
8871 * @dim : configuration dimension
8872 * @config_type : input or output configuration type
8873 *
8874 * RETURN : NONE
8875 *==========================================================================*/
8876void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8877 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8878{
8879 available_stream_configs.add(scalar_format);
8880 available_stream_configs.add(dim.width);
8881 available_stream_configs.add(dim.height);
8882 available_stream_configs.add(config_type);
8883}
8884
8885/*===========================================================================
8886 * FUNCTION   : supportBurstCapture
8887 *
8888 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8889 *
8890 * PARAMETERS :
8891 * @cameraId : camera Id
8892 *
8893 * RETURN : true if camera supports BURST_CAPTURE
8894 * false otherwise
8895 *==========================================================================*/
8896bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8897{
8898 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8899 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8900 const int32_t highResWidth = 3264;
8901 const int32_t highResHeight = 2448;
8902
8903 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8904 // Maximum resolution images cannot be captured at >= 10fps
8905 // -> not supporting BURST_CAPTURE
8906 return false;
8907 }
8908
8909 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8910 // Maximum resolution images can be captured at >= 20fps
8911 // --> supporting BURST_CAPTURE
8912 return true;
8913 }
8914
8915 // Find the smallest highRes resolution, or largest resolution if there is none
8916 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8917 MAX_SIZES_CNT);
8918 size_t highRes = 0;
8919 while ((highRes + 1 < totalCnt) &&
8920 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8921 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8922 highResWidth * highResHeight)) {
8923 highRes++;
8924 }
8925 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8926 return true;
8927 } else {
8928 return false;
8929 }
8930}
8931
8932/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008933 * FUNCTION : getPDStatIndex
8934 *
8935 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8936 *
8937 * PARAMETERS :
8938 * @caps : camera capabilities
8939 *
8940 * RETURN : int32_t type
8941 * non-negative - on success
8942 * -1 - on failure
8943 *==========================================================================*/
8944int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8945 if (nullptr == caps) {
8946 return -1;
8947 }
8948
8949 uint32_t metaRawCount = caps->meta_raw_channel_count;
8950 int32_t ret = -1;
8951 for (size_t i = 0; i < metaRawCount; i++) {
8952 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8953 ret = i;
8954 break;
8955 }
8956 }
8957
8958 return ret;
8959}
8960
8961/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008962 * FUNCTION : initStaticMetadata
8963 *
8964 * DESCRIPTION: initialize the static metadata
8965 *
8966 * PARAMETERS :
8967 * @cameraId : camera Id
8968 *
8969 * RETURN : int32_t type of status
8970 * 0 -- success
8971 * non-zero failure code
8972 *==========================================================================*/
8973int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8974{
8975 int rc = 0;
8976 CameraMetadata staticInfo;
8977 size_t count = 0;
8978 bool limitedDevice = false;
8979 char prop[PROPERTY_VALUE_MAX];
8980 bool supportBurst = false;
8981
8982 supportBurst = supportBurstCapture(cameraId);
8983
8984 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8985     * guaranteed or if min fps of max resolution is less than 20 fps, it is
8986     * advertised as a limited device */
8987 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8988 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8989 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8990 !supportBurst;
8991
8992 uint8_t supportedHwLvl = limitedDevice ?
8993 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008994#ifndef USE_HAL_3_3
8995 // LEVEL_3 - This device will support level 3.
8996 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8997#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008998 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008999#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009000
9001 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9002 &supportedHwLvl, 1);
9003
9004 bool facingBack = false;
9005 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9006 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9007 facingBack = true;
9008 }
9009 /*HAL 3 only*/
9010 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9011 &gCamCapability[cameraId]->min_focus_distance, 1);
9012
9013 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9014 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9015
9016 /*should be using focal lengths but sensor doesn't provide that info now*/
9017 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9018 &gCamCapability[cameraId]->focal_length,
9019 1);
9020
9021 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9022 gCamCapability[cameraId]->apertures,
9023 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9024
9025 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9026 gCamCapability[cameraId]->filter_densities,
9027 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9028
9029
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009030 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9031 size_t mode_count =
9032 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9033 for (size_t i = 0; i < mode_count; i++) {
9034 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9035 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009036 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009037 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009038
9039 int32_t lens_shading_map_size[] = {
9040 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9041 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9042 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9043 lens_shading_map_size,
9044 sizeof(lens_shading_map_size)/sizeof(int32_t));
9045
9046 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9047 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9048
9049 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9050 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9051
9052 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9053 &gCamCapability[cameraId]->max_frame_duration, 1);
9054
9055 camera_metadata_rational baseGainFactor = {
9056 gCamCapability[cameraId]->base_gain_factor.numerator,
9057 gCamCapability[cameraId]->base_gain_factor.denominator};
9058 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9059 &baseGainFactor, 1);
9060
9061 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9062 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9063
9064 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9065 gCamCapability[cameraId]->pixel_array_size.height};
9066 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9067 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9068
9069 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9070 gCamCapability[cameraId]->active_array_size.top,
9071 gCamCapability[cameraId]->active_array_size.width,
9072 gCamCapability[cameraId]->active_array_size.height};
9073 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9074 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9075
9076 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9077 &gCamCapability[cameraId]->white_level, 1);
9078
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009079 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9080 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9081 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009082 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009083 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009084
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009085#ifndef USE_HAL_3_3
9086 bool hasBlackRegions = false;
9087 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9088 LOGW("black_region_count: %d is bounded to %d",
9089 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9090 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9091 }
9092 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9093 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9094 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9095 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9096 }
9097 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9098 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9099 hasBlackRegions = true;
9100 }
9101#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009102 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9103 &gCamCapability[cameraId]->flash_charge_duration, 1);
9104
9105 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9106 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9107
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009108 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9109 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9110 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009111 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9112 &timestampSource, 1);
9113
Thierry Strudel54dc9782017-02-15 12:12:10 -08009114 //update histogram vendor data
9115 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009116 &gCamCapability[cameraId]->histogram_size, 1);
9117
Thierry Strudel54dc9782017-02-15 12:12:10 -08009118 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009119 &gCamCapability[cameraId]->max_histogram_count, 1);
9120
Shuzhen Wang14415f52016-11-16 18:26:18 -08009121 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9122    //so that the app can request fewer bins than the maximum supported.
9123 std::vector<int32_t> histBins;
9124 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9125 histBins.push_back(maxHistBins);
9126 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9127 (maxHistBins & 0x1) == 0) {
9128 histBins.push_back(maxHistBins >> 1);
9129 maxHistBins >>= 1;
9130 }
9131 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9132 histBins.data(), histBins.size());
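    // Worked example (illustrative only, hypothetical values): with
    // max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the loop
    // above advertises histBins = {256, 128, 64, 32}; halving stops once the
    // next value would drop below the minimum or the current value becomes odd.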
9133
Thierry Strudel3d639192016-09-09 11:52:26 -07009134 int32_t sharpness_map_size[] = {
9135 gCamCapability[cameraId]->sharpness_map_size.width,
9136 gCamCapability[cameraId]->sharpness_map_size.height};
9137
9138 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9139 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9140
9141 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9142 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9143
Emilian Peev0f3c3162017-03-15 12:57:46 +00009144 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9145 if (0 <= indexPD) {
9146 // Advertise PD stats data as part of the Depth capabilities
9147 int32_t depthWidth =
9148 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9149 int32_t depthHeight =
9150 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9151 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9152 assert(0 < depthSamplesCount);
9153 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9154 &depthSamplesCount, 1);
9155
9156 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9157 depthHeight,
9158 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9159 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9160 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9161 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9162 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9163
9164 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9165 depthHeight, 33333333,
9166 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9167 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9168 depthMinDuration,
9169 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9170
9171 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9172 depthHeight, 0,
9173 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9174 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9175 depthStallDuration,
9176 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9177
9178 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9179 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9180 }
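    // Worked example (illustrative only, hypothetical dimensions): a 640x480 PD
    // stats plane gives depthSamplesCount = (640 * 480 * 2) / 16 = 38400 point
    // samples, which is what is advertised in ANDROID_DEPTH_MAX_DEPTH_SAMPLES
    // and used as the width of the depth point cloud BLOB configuration above.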
9181
Thierry Strudel3d639192016-09-09 11:52:26 -07009182 int32_t scalar_formats[] = {
9183 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9184 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9185 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9186 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9187 HAL_PIXEL_FORMAT_RAW10,
9188 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009189 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9190 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9191 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009192
9193 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9194 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9195 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9196 count, MAX_SIZES_CNT, available_processed_sizes);
9197 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9198 available_processed_sizes, count * 2);
9199
9200 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9201 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9202 makeTable(gCamCapability[cameraId]->raw_dim,
9203 count, MAX_SIZES_CNT, available_raw_sizes);
9204 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9205 available_raw_sizes, count * 2);
9206
9207 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9208 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9209 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9210 count, MAX_SIZES_CNT, available_fps_ranges);
9211 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9212 available_fps_ranges, count * 2);
9213
9214 camera_metadata_rational exposureCompensationStep = {
9215 gCamCapability[cameraId]->exp_compensation_step.numerator,
9216 gCamCapability[cameraId]->exp_compensation_step.denominator};
9217 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9218 &exposureCompensationStep, 1);
9219
9220 Vector<uint8_t> availableVstabModes;
9221 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9222 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009223 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009224 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009225 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009226 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009227 count = IS_TYPE_MAX;
9228 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9229 for (size_t i = 0; i < count; i++) {
9230 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9231 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9232 eisSupported = true;
9233 break;
9234 }
9235 }
9236 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009237 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9238 }
9239 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9240 availableVstabModes.array(), availableVstabModes.size());
9241
9242 /*HAL 1 and HAL 3 common*/
9243 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9244 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9245 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009246 // Cap the max zoom to the max preferred value
9247 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009248 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9249 &maxZoom, 1);
9250
9251 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9252 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9253
9254 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9255 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9256 max3aRegions[2] = 0; /* AF not supported */
9257 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9258 max3aRegions, 3);
9259
9260 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9261 memset(prop, 0, sizeof(prop));
9262 property_get("persist.camera.facedetect", prop, "1");
9263 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9264 LOGD("Support face detection mode: %d",
9265 supportedFaceDetectMode);
9266
9267 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009268 /* Supported mode should be OFF if the max number of faces is 0 */
9269 if (maxFaces <= 0) {
9270 supportedFaceDetectMode = 0;
9271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009272 Vector<uint8_t> availableFaceDetectModes;
9273 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9274 if (supportedFaceDetectMode == 1) {
9275 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9276 } else if (supportedFaceDetectMode == 2) {
9277 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9278 } else if (supportedFaceDetectMode == 3) {
9279 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9280 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9281 } else {
9282 maxFaces = 0;
9283 }
9284 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9285 availableFaceDetectModes.array(),
9286 availableFaceDetectModes.size());
9287 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9288 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009289 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9290 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9291 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009292
9293 int32_t exposureCompensationRange[] = {
9294 gCamCapability[cameraId]->exposure_compensation_min,
9295 gCamCapability[cameraId]->exposure_compensation_max};
9296 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9297 exposureCompensationRange,
9298 sizeof(exposureCompensationRange)/sizeof(int32_t));
9299
9300 uint8_t lensFacing = (facingBack) ?
9301 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9302 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9303
9304 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9305 available_thumbnail_sizes,
9306 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9307
9308 /* All sizes will be combined into this tag */
9309 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9310 /*android.scaler.availableStreamConfigurations*/
9311 Vector<int32_t> available_stream_configs;
9312 cam_dimension_t active_array_dim;
9313 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9314 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009315
9316 /* Advertise the list of supported input (reprocess) dimensions based on the property below.
9317 By default, only sizes of 5 MP (2592x1944) and larger are advertised as input streams.
9318 Note that the setprop resolution format should be WxH,
9319 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9320 To advertise all supported sizes, set the property to "0x0". */
9321 cam_dimension_t minInputSize = {2592,1944}; //5MP
9322 memset(prop, 0, sizeof(prop));
9323 property_get("persist.camera.input.minsize", prop, "2592x1944");
9324 if (strlen(prop) > 0) {
9325 char *saveptr = NULL;
9326 char *token = strtok_r(prop, "x", &saveptr);
9327 if (token != NULL) {
9328 minInputSize.width = atoi(token);
9329 }
9330 token = strtok_r(NULL, "x", &saveptr);
9331 if (token != NULL) {
9332 minInputSize.height = atoi(token);
9333 }
9334 }
9335
Thierry Strudel3d639192016-09-09 11:52:26 -07009336 /* Add input/output stream configurations for each scalar format */
9337 for (size_t j = 0; j < scalar_formats_count; j++) {
9338 switch (scalar_formats[j]) {
9339 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9340 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9341 case HAL_PIXEL_FORMAT_RAW10:
9342 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9343 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9344 addStreamConfig(available_stream_configs, scalar_formats[j],
9345 gCamCapability[cameraId]->raw_dim[i],
9346 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9347 }
9348 break;
9349 case HAL_PIXEL_FORMAT_BLOB:
9350 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9351 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9352 addStreamConfig(available_stream_configs, scalar_formats[j],
9353 gCamCapability[cameraId]->picture_sizes_tbl[i],
9354 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9355 }
9356 break;
9357 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9358 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9359 default:
9360 cam_dimension_t largest_picture_size;
9361 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9362 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9363 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9364 addStreamConfig(available_stream_configs, scalar_formats[j],
9365 gCamCapability[cameraId]->picture_sizes_tbl[i],
9366 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009367 /* For the two formats below we also support input streams for reprocessing; advertise those */
9368 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9369 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9370 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9371 >= minInputSize.width) || (gCamCapability[cameraId]->
9372 picture_sizes_tbl[i].height >= minInputSize.height)) {
9373 addStreamConfig(available_stream_configs, scalar_formats[j],
9374 gCamCapability[cameraId]->picture_sizes_tbl[i],
9375 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9376 }
9377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009378 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009379
Thierry Strudel3d639192016-09-09 11:52:26 -07009380 break;
9381 }
9382 }
9383
9384 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9385 available_stream_configs.array(), available_stream_configs.size());
9386 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9387 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9388
9389 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9390 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9391
9392 /* android.scaler.availableMinFrameDurations */
9393 Vector<int64_t> available_min_durations;
9394 for (size_t j = 0; j < scalar_formats_count; j++) {
9395 switch (scalar_formats[j]) {
9396 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9397 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9398 case HAL_PIXEL_FORMAT_RAW10:
9399 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9400 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9401 available_min_durations.add(scalar_formats[j]);
9402 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9403 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9404 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9405 }
9406 break;
9407 default:
9408 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9409 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9410 available_min_durations.add(scalar_formats[j]);
9411 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9412 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9413 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9414 }
9415 break;
9416 }
9417 }
9418 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9419 available_min_durations.array(), available_min_durations.size());
9420
9421 Vector<int32_t> available_hfr_configs;
9422 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9423 int32_t fps = 0;
9424 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9425 case CAM_HFR_MODE_60FPS:
9426 fps = 60;
9427 break;
9428 case CAM_HFR_MODE_90FPS:
9429 fps = 90;
9430 break;
9431 case CAM_HFR_MODE_120FPS:
9432 fps = 120;
9433 break;
9434 case CAM_HFR_MODE_150FPS:
9435 fps = 150;
9436 break;
9437 case CAM_HFR_MODE_180FPS:
9438 fps = 180;
9439 break;
9440 case CAM_HFR_MODE_210FPS:
9441 fps = 210;
9442 break;
9443 case CAM_HFR_MODE_240FPS:
9444 fps = 240;
9445 break;
9446 case CAM_HFR_MODE_480FPS:
9447 fps = 480;
9448 break;
9449 case CAM_HFR_MODE_OFF:
9450 case CAM_HFR_MODE_MAX:
9451 default:
9452 break;
9453 }
9454
9455 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9456 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9457 /* For each HFR frame rate, need to advertise one variable fps range
9458 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9459 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9460 * set by the app. When video recording is started, [120, 120] is
9461 * set. This way sensor configuration does not change when recording
9462 * is started */
9463
9464 /* (width, height, fps_min, fps_max, batch_size_max) */
9465 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9466 j < MAX_SIZES_CNT; j++) {
9467 available_hfr_configs.add(
9468 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9469 available_hfr_configs.add(
9470 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9471 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9472 available_hfr_configs.add(fps);
9473 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9474
9475 /* (width, height, fps_min, fps_max, batch_size_max) */
9476 available_hfr_configs.add(
9477 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9478 available_hfr_configs.add(
9479 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9480 available_hfr_configs.add(fps);
9481 available_hfr_configs.add(fps);
9482 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9483 }
9484 }
9485 }
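// Illustrative example (the size is hypothetical): with PREVIEW_FPS_FOR_HFR
// of 30, a 120 fps table entry of 1920x1080 would add the tuples
// (1920, 1080, 30, 120, 4) and (1920, 1080, 120, 120, 4) above.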
9486 //Advertise HFR capability only if the property is set
9487 memset(prop, 0, sizeof(prop));
9488 property_get("persist.camera.hal3hfr.enable", prop, "1");
9489 uint8_t hfrEnable = (uint8_t)atoi(prop);
9490
9491 if(hfrEnable && available_hfr_configs.array()) {
9492 staticInfo.update(
9493 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9494 available_hfr_configs.array(), available_hfr_configs.size());
9495 }
9496
9497 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9498 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9499 &max_jpeg_size, 1);
9500
9501 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9502 size_t size = 0;
9503 count = CAM_EFFECT_MODE_MAX;
9504 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9505 for (size_t i = 0; i < count; i++) {
9506 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9507 gCamCapability[cameraId]->supported_effects[i]);
9508 if (NAME_NOT_FOUND != val) {
9509 avail_effects[size] = (uint8_t)val;
9510 size++;
9511 }
9512 }
9513 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9514 avail_effects,
9515 size);
9516
9517 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9518 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9519 size_t supported_scene_modes_cnt = 0;
9520 count = CAM_SCENE_MODE_MAX;
9521 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9522 for (size_t i = 0; i < count; i++) {
9523 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9524 CAM_SCENE_MODE_OFF) {
9525 int val = lookupFwkName(SCENE_MODES_MAP,
9526 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9527 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009528
Thierry Strudel3d639192016-09-09 11:52:26 -07009529 if (NAME_NOT_FOUND != val) {
9530 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9531 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9532 supported_scene_modes_cnt++;
9533 }
9534 }
9535 }
9536 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9537 avail_scene_modes,
9538 supported_scene_modes_cnt);
9539
9540 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9541 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9542 supported_scene_modes_cnt,
9543 CAM_SCENE_MODE_MAX,
9544 scene_mode_overrides,
9545 supported_indexes,
9546 cameraId);
9547
9548 if (supported_scene_modes_cnt == 0) {
9549 supported_scene_modes_cnt = 1;
9550 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9551 }
9552
9553 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9554 scene_mode_overrides, supported_scene_modes_cnt * 3);
9555
9556 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9557 ANDROID_CONTROL_MODE_AUTO,
9558 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9559 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9560 available_control_modes,
9561 3);
9562
9563 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9564 size = 0;
9565 count = CAM_ANTIBANDING_MODE_MAX;
9566 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9567 for (size_t i = 0; i < count; i++) {
9568 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9569 gCamCapability[cameraId]->supported_antibandings[i]);
9570 if (NAME_NOT_FOUND != val) {
9571 avail_antibanding_modes[size] = (uint8_t)val;
9572 size++;
9573 }
9574
9575 }
9576 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9577 avail_antibanding_modes,
9578 size);
9579
9580 uint8_t avail_abberation_modes[] = {
9581 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9582 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9583 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9584 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9585 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9586 if (0 == count) {
9587 // If no aberration correction modes are available for a device, advertise only the OFF mode
9588 size = 1;
9589 } else {
9590 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9591 // so advertise all 3 modes if at least one mode is supported, as per the
9592 // Android M requirement
9593 size = 3;
9594 }
9595 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9596 avail_abberation_modes,
9597 size);
9598
9599 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9600 size = 0;
9601 count = CAM_FOCUS_MODE_MAX;
9602 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9603 for (size_t i = 0; i < count; i++) {
9604 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9605 gCamCapability[cameraId]->supported_focus_modes[i]);
9606 if (NAME_NOT_FOUND != val) {
9607 avail_af_modes[size] = (uint8_t)val;
9608 size++;
9609 }
9610 }
9611 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9612 avail_af_modes,
9613 size);
9614
9615 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9616 size = 0;
9617 count = CAM_WB_MODE_MAX;
9618 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9619 for (size_t i = 0; i < count; i++) {
9620 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9621 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9622 gCamCapability[cameraId]->supported_white_balances[i]);
9623 if (NAME_NOT_FOUND != val) {
9624 avail_awb_modes[size] = (uint8_t)val;
9625 size++;
9626 }
9627 }
9628 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9629 avail_awb_modes,
9630 size);
9631
9632 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9633 count = CAM_FLASH_FIRING_LEVEL_MAX;
9634 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9635 count);
9636 for (size_t i = 0; i < count; i++) {
9637 available_flash_levels[i] =
9638 gCamCapability[cameraId]->supported_firing_levels[i];
9639 }
9640 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9641 available_flash_levels, count);
9642
9643 uint8_t flashAvailable;
9644 if (gCamCapability[cameraId]->flash_available)
9645 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9646 else
9647 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9648 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9649 &flashAvailable, 1);
9650
9651 Vector<uint8_t> avail_ae_modes;
9652 count = CAM_AE_MODE_MAX;
9653 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9654 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009655 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9656 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9657 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9658 }
9659 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009660 }
9661 if (flashAvailable) {
9662 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9663 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9664 }
9665 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9666 avail_ae_modes.array(),
9667 avail_ae_modes.size());
9668
9669 int32_t sensitivity_range[2];
9670 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9671 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9672 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9673 sensitivity_range,
9674 sizeof(sensitivity_range) / sizeof(int32_t));
9675
9676 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9677 &gCamCapability[cameraId]->max_analog_sensitivity,
9678 1);
9679
9680 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9681 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9682 &sensor_orientation,
9683 1);
9684
9685 int32_t max_output_streams[] = {
9686 MAX_STALLING_STREAMS,
9687 MAX_PROCESSED_STREAMS,
9688 MAX_RAW_STREAMS};
9689 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9690 max_output_streams,
9691 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9692
9693 uint8_t avail_leds = 0;
9694 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9695 &avail_leds, 0);
9696
9697 uint8_t focus_dist_calibrated;
9698 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9699 gCamCapability[cameraId]->focus_dist_calibrated);
9700 if (NAME_NOT_FOUND != val) {
9701 focus_dist_calibrated = (uint8_t)val;
9702 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9703 &focus_dist_calibrated, 1);
9704 }
9705
9706 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9707 size = 0;
9708 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9709 MAX_TEST_PATTERN_CNT);
9710 for (size_t i = 0; i < count; i++) {
9711 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9712 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9713 if (NAME_NOT_FOUND != testpatternMode) {
9714 avail_testpattern_modes[size] = testpatternMode;
9715 size++;
9716 }
9717 }
9718 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9719 avail_testpattern_modes,
9720 size);
9721
9722 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9723 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9724 &max_pipeline_depth,
9725 1);
9726
9727 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9728 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9729 &partial_result_count,
9730 1);
9731
9732 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9733 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9734
9735 Vector<uint8_t> available_capabilities;
9736 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9737 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9738 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9739 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9740 if (supportBurst) {
9741 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9742 }
9743 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9744 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9745 if (hfrEnable && available_hfr_configs.array()) {
9746 available_capabilities.add(
9747 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9748 }
9749
9750 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9751 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9752 }
9753 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9754 available_capabilities.array(),
9755 available_capabilities.size());
9756
9757 // aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9758 // The assumption is that all Bayer cameras support MANUAL_SENSOR.
9759 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9760 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9761
9762 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9763 &aeLockAvailable, 1);
9764
9765 // awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9766 // BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9767 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9768 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9769
9770 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9771 &awbLockAvailable, 1);
9772
9773 int32_t max_input_streams = 1;
9774 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9775 &max_input_streams,
9776 1);
9777
9778 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9779 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9780 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9781 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9782 HAL_PIXEL_FORMAT_YCbCr_420_888};
9783 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9784 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9785
9786 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9787 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9788 &max_latency,
9789 1);
9790
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009791#ifndef USE_HAL_3_3
9792 int32_t isp_sensitivity_range[2];
9793 isp_sensitivity_range[0] =
9794 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9795 isp_sensitivity_range[1] =
9796 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9797 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9798 isp_sensitivity_range,
9799 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9800#endif
9801
Thierry Strudel3d639192016-09-09 11:52:26 -07009802 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9803 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9804 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9805 available_hot_pixel_modes,
9806 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9807
9808 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9809 ANDROID_SHADING_MODE_FAST,
9810 ANDROID_SHADING_MODE_HIGH_QUALITY};
9811 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9812 available_shading_modes,
9813 3);
9814
9815 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9816 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9817 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9818 available_lens_shading_map_modes,
9819 2);
9820
9821 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9822 ANDROID_EDGE_MODE_FAST,
9823 ANDROID_EDGE_MODE_HIGH_QUALITY,
9824 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9825 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9826 available_edge_modes,
9827 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9828
9829 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9830 ANDROID_NOISE_REDUCTION_MODE_FAST,
9831 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9832 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9833 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9834 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9835 available_noise_red_modes,
9836 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9837
9838 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9839 ANDROID_TONEMAP_MODE_FAST,
9840 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9841 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9842 available_tonemap_modes,
9843 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9844
9845 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9846 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9847 available_hot_pixel_map_modes,
9848 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9849
9850 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9851 gCamCapability[cameraId]->reference_illuminant1);
9852 if (NAME_NOT_FOUND != val) {
9853 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9854 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9855 }
9856
9857 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9858 gCamCapability[cameraId]->reference_illuminant2);
9859 if (NAME_NOT_FOUND != val) {
9860 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9861 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9862 }
9863
9864 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9865 (void *)gCamCapability[cameraId]->forward_matrix1,
9866 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9867
9868 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9869 (void *)gCamCapability[cameraId]->forward_matrix2,
9870 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9871
9872 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9873 (void *)gCamCapability[cameraId]->color_transform1,
9874 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9875
9876 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9877 (void *)gCamCapability[cameraId]->color_transform2,
9878 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9879
9880 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9881 (void *)gCamCapability[cameraId]->calibration_transform1,
9882 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9883
9884 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9885 (void *)gCamCapability[cameraId]->calibration_transform2,
9886 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9887
9888 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9889 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9890 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9891 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9892 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9893 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9894 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9895 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9896 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9897 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9898 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9899 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9900 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9901 ANDROID_JPEG_GPS_COORDINATES,
9902 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9903 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9904 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9905 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9906 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9907 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9908 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9909 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9910 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9911 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009912#ifndef USE_HAL_3_3
9913 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9914#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009915 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009916 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009917 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9918 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009919 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009920 /* DevCamDebug metadata request_keys_basic */
9921 DEVCAMDEBUG_META_ENABLE,
9922 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009923 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009924 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9925 NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV
Samuel Ha68ba5172016-12-15 18:41:12 -08009926 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009927
9928 size_t request_keys_cnt =
9929 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9930 Vector<int32_t> available_request_keys;
9931 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9932 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9933 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9934 }
9935
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009936 if (gExposeEnableZslKey) {
9937 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9938 }
9939
Thierry Strudel3d639192016-09-09 11:52:26 -07009940 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9941 available_request_keys.array(), available_request_keys.size());
9942
9943 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9944 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9945 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9946 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9947 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9948 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9949 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9950 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9951 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9952 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9953 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9954 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9955 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9956 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9957 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9958 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9959 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009960 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009961 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9962 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9963 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009964 ANDROID_STATISTICS_FACE_SCORES,
9965#ifndef USE_HAL_3_3
9966 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9967#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009968 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009969 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009970 // DevCamDebug metadata result_keys_basic
9971 DEVCAMDEBUG_META_ENABLE,
9972 // DevCamDebug metadata result_keys AF
9973 DEVCAMDEBUG_AF_LENS_POSITION,
9974 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9975 DEVCAMDEBUG_AF_TOF_DISTANCE,
9976 DEVCAMDEBUG_AF_LUMA,
9977 DEVCAMDEBUG_AF_HAF_STATE,
9978 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9979 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9980 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9981 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9982 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9983 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9984 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9985 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9986 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9987 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9988 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9989 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9990 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9991 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9992 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
9993 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
9994 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
9995 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
9996 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
9997 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
9998 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
9999 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10000 // DevCamDebug metadata result_keys AEC
10001 DEVCAMDEBUG_AEC_TARGET_LUMA,
10002 DEVCAMDEBUG_AEC_COMP_LUMA,
10003 DEVCAMDEBUG_AEC_AVG_LUMA,
10004 DEVCAMDEBUG_AEC_CUR_LUMA,
10005 DEVCAMDEBUG_AEC_LINECOUNT,
10006 DEVCAMDEBUG_AEC_REAL_GAIN,
10007 DEVCAMDEBUG_AEC_EXP_INDEX,
10008 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010009 // DevCamDebug metadata result_keys zzHDR
10010 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10011 DEVCAMDEBUG_AEC_L_LINECOUNT,
10012 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10013 DEVCAMDEBUG_AEC_S_LINECOUNT,
10014 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10015 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10016 // DevCamDebug metadata result_keys ADRC
10017 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10018 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10019 DEVCAMDEBUG_AEC_GTM_RATIO,
10020 DEVCAMDEBUG_AEC_LTM_RATIO,
10021 DEVCAMDEBUG_AEC_LA_RATIO,
10022 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010023 // DevCamDebug metadata result_keys AWB
10024 DEVCAMDEBUG_AWB_R_GAIN,
10025 DEVCAMDEBUG_AWB_G_GAIN,
10026 DEVCAMDEBUG_AWB_B_GAIN,
10027 DEVCAMDEBUG_AWB_CCT,
10028 DEVCAMDEBUG_AWB_DECISION,
10029 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010030 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10031 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10032 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010033 };
10034
Thierry Strudel3d639192016-09-09 11:52:26 -070010035 size_t result_keys_cnt =
10036 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10037
10038 Vector<int32_t> available_result_keys;
10039 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10040 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10041 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10042 }
10043 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10044 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10045 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10046 }
10047 if (supportedFaceDetectMode == 1) {
10048 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10049 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10050 } else if ((supportedFaceDetectMode == 2) ||
10051 (supportedFaceDetectMode == 3)) {
10052 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10053 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10054 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010055#ifndef USE_HAL_3_3
10056 if (hasBlackRegions) {
10057 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10058 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10059 }
10060#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010061
10062 if (gExposeEnableZslKey) {
10063 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10064 }
10065
Thierry Strudel3d639192016-09-09 11:52:26 -070010066 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10067 available_result_keys.array(), available_result_keys.size());
10068
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010069 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010070 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10071 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10072 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10073 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10074 ANDROID_SCALER_CROPPING_TYPE,
10075 ANDROID_SYNC_MAX_LATENCY,
10076 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10077 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10078 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10079 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10080 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10081 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10082 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10083 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10084 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10085 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10086 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10087 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10088 ANDROID_LENS_FACING,
10089 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10090 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10091 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10092 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10093 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10094 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10095 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10096 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10097 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10098 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10099 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10100 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10101 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10102 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10103 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10104 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10105 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10106 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10107 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10108 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010109 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010110 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10111 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10112 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10113 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10114 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10115 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10116 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10117 ANDROID_CONTROL_AVAILABLE_MODES,
10118 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10119 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10120 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10121 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010122 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10123#ifndef USE_HAL_3_3
10124 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10125 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10126#endif
10127 };
10128
10129 Vector<int32_t> available_characteristics_keys;
10130 available_characteristics_keys.appendArray(characteristics_keys_basic,
10131 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10132#ifndef USE_HAL_3_3
10133 if (hasBlackRegions) {
10134 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10135 }
10136#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010137
10138 if (0 <= indexPD) {
10139 int32_t depthKeys[] = {
10140 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10141 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10142 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10143 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10144 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10145 };
10146 available_characteristics_keys.appendArray(depthKeys,
10147 sizeof(depthKeys) / sizeof(depthKeys[0]));
10148 }
10149
Thierry Strudel3d639192016-09-09 11:52:26 -070010150 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010151 available_characteristics_keys.array(),
10152 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010153
10154 /* Available stall durations depend on the HW + SW and will differ across devices */
10155 /* TODO: add RAW stall durations after implementation */
10156 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10157 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10158
10159 Vector<int64_t> available_stall_durations;
10160 for (uint32_t j = 0; j < stall_formats_count; j++) {
10161 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10162 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10163 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10164 available_stall_durations.add(stall_formats[j]);
10165 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10166 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10167 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10168 }
10169 } else {
10170 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10171 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10172 available_stall_durations.add(stall_formats[j]);
10173 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10174 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10175 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10176 }
10177 }
10178 }
10179 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10180 available_stall_durations.array(),
10181 available_stall_durations.size());
10182
10183 //QCAMERA3_OPAQUE_RAW
10184 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10185 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10186 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10187 case LEGACY_RAW:
10188 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10189 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10190 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10191 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10192 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10193 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10194 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10195 break;
10196 case MIPI_RAW:
10197 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10198 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10199 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10200 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10201 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10202 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10203 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10204 break;
10205 default:
10206 LOGE("unknown opaque_raw_format %d",
10207 gCamCapability[cameraId]->opaque_raw_fmt);
10208 break;
10209 }
10210 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10211
10212 Vector<int32_t> strides;
10213 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10214 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10215 cam_stream_buf_plane_info_t buf_planes;
10216 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10217 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10218 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10219 &gCamCapability[cameraId]->padding_info, &buf_planes);
10220 strides.add(buf_planes.plane_info.mp[0].stride);
10221 }
10222 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10223 strides.size());
10224
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010225 //TBD: remove the following line once backend advertises zzHDR in feature mask
10226 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010227 // Advertise video HDR modes if any video HDR feature is supported
10228 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10229 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010230 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010231 int32_t vhdr_mode[] = {
10232 QCAMERA3_VIDEO_HDR_MODE_OFF,
10233 QCAMERA3_VIDEO_HDR_MODE_ON};
10234
10235 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10236 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10237 vhdr_mode, vhdr_mode_count);
10238 }
10239
Thierry Strudel3d639192016-09-09 11:52:26 -070010240 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10241 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10242 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10243
10244 uint8_t isMonoOnly =
10245 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10246 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10247 &isMonoOnly, 1);
10248
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010249#ifndef USE_HAL_3_3
10250 Vector<int32_t> opaque_size;
10251 for (size_t j = 0; j < scalar_formats_count; j++) {
10252 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10253 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10254 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10255 cam_stream_buf_plane_info_t buf_planes;
10256
10257 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10258 &gCamCapability[cameraId]->padding_info, &buf_planes);
10259
10260 if (rc == 0) {
10261 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10262 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10263 opaque_size.add(buf_planes.plane_info.frame_len);
10264 } else {
10265 LOGE("raw frame calculation failed!");
10266 }
10267 }
10268 }
10269 }
10270
10271 if ((opaque_size.size() > 0) &&
10272 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10273 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10274 else
10275 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10276#endif
10277
Thierry Strudel04e026f2016-10-10 11:27:36 -070010278 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10279 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10280 size = 0;
10281 count = CAM_IR_MODE_MAX;
10282 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10283 for (size_t i = 0; i < count; i++) {
10284 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10285 gCamCapability[cameraId]->supported_ir_modes[i]);
10286 if (NAME_NOT_FOUND != val) {
10287 avail_ir_modes[size] = (int32_t)val;
10288 size++;
10289 }
10290 }
10291 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10292 avail_ir_modes, size);
10293 }
10294
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010295 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10296 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10297 size = 0;
10298 count = CAM_AEC_CONVERGENCE_MAX;
10299 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10300 for (size_t i = 0; i < count; i++) {
10301 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10302 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10303 if (NAME_NOT_FOUND != val) {
10304 available_instant_aec_modes[size] = (int32_t)val;
10305 size++;
10306 }
10307 }
10308 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10309 available_instant_aec_modes, size);
10310 }
10311
Thierry Strudel54dc9782017-02-15 12:12:10 -080010312 int32_t sharpness_range[] = {
10313 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10314 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10315 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10316
10317 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10318 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10319 size = 0;
10320 count = CAM_BINNING_CORRECTION_MODE_MAX;
10321 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10322 for (size_t i = 0; i < count; i++) {
10323 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10324 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10325 gCamCapability[cameraId]->supported_binning_modes[i]);
10326 if (NAME_NOT_FOUND != val) {
10327 avail_binning_modes[size] = (int32_t)val;
10328 size++;
10329 }
10330 }
10331 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10332 avail_binning_modes, size);
10333 }
10334
10335 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10336 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10337 size = 0;
10338 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10339 for (size_t i = 0; i < count; i++) {
10340 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10341 gCamCapability[cameraId]->supported_aec_modes[i]);
10342 if (NAME_NOT_FOUND != val)
10343 available_aec_modes[size++] = val;
10344 }
10345 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10346 available_aec_modes, size);
10347 }
10348
10349 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10350 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10351 size = 0;
10352 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10353 for (size_t i = 0; i < count; i++) {
10354 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10355 gCamCapability[cameraId]->supported_iso_modes[i]);
10356 if (NAME_NOT_FOUND != val)
10357 available_iso_modes[size++] = val;
10358 }
10359 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10360 available_iso_modes, size);
10361 }
10362
10363 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10364 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++) // use the range count, not the stale 'count' from the block above
10365 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10366 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10367 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10368
10369 int32_t available_saturation_range[4];
10370 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10371 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10372 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10373 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10374 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10375 available_saturation_range, 4);
10376
10377 uint8_t is_hdr_values[2];
10378 is_hdr_values[0] = 0;
10379 is_hdr_values[1] = 1;
10380 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10381 is_hdr_values, 2);
10382
10383 float is_hdr_confidence_range[2];
10384 is_hdr_confidence_range[0] = 0.0;
10385 is_hdr_confidence_range[1] = 1.0;
10386 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10387 is_hdr_confidence_range, 2);
10388
Emilian Peev0a972ef2017-03-16 10:25:53 +000010389 size_t eepromLength = strnlen(
10390 reinterpret_cast<const char *>(
10391 gCamCapability[cameraId]->eeprom_version_info),
10392 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10393 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010394 char easelInfo[] = ",E:N";
10395 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10396 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10397 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010398 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10399 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010400 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010401 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10402 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10403 }
10404
Thierry Strudel3d639192016-09-09 11:52:26 -070010405 gStaticMetadata[cameraId] = staticInfo.release();
10406 return rc;
10407}
10408
10409/*===========================================================================
10410 * FUNCTION : makeTable
10411 *
10412 * DESCRIPTION: make a table of sizes
10413 *
10414 * PARAMETERS :
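*   @dimTable  : table of dimensions (width/height) to flatten
*   @size      : number of valid entries in dimTable
*   @max_size  : maximum number of entries that may be copied
*   @sizeTable : output array receiving interleaved width,height values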
10415 *
10416 *
10417 *==========================================================================*/
10418void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10419 size_t max_size, int32_t *sizeTable)
10420{
10421 size_t j = 0;
10422 if (size > max_size) {
10423 size = max_size;
10424 }
10425 for (size_t i = 0; i < size; i++) {
10426 sizeTable[j] = dimTable[i].width;
10427 sizeTable[j+1] = dimTable[i].height;
10428 j+=2;
10429 }
10430}
10431
10432/*===========================================================================
10433 * FUNCTION : makeFPSTable
10434 *
10435 * DESCRIPTION: make a table of fps ranges
10436 *
10437 * PARAMETERS :
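*   @fpsTable       : table of fps ranges to flatten
*   @size           : number of valid entries in fpsTable
*   @max_size       : maximum number of entries that may be copied
*   @fpsRangesTable : output array receiving interleaved min_fps,max_fps values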
10438 *
10439 *==========================================================================*/
10440void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10441 size_t max_size, int32_t *fpsRangesTable)
10442{
10443 size_t j = 0;
10444 if (size > max_size) {
10445 size = max_size;
10446 }
10447 for (size_t i = 0; i < size; i++) {
10448 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10449 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10450 j+=2;
10451 }
10452}
10453
10454/*===========================================================================
10455 * FUNCTION : makeOverridesList
10456 *
10457 * DESCRIPTION: make a list of scene mode overrides
10458 *
10459 * PARAMETERS :
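*   @overridesTable    : scene mode override table reported by the backend
*   @size              : number of framework-supported scene modes
*   @max_size          : maximum number of entries that may be read
*   @overridesList     : output list of (AE, AWB, AF) override triplets
*   @supported_indexes : backend indexes of the framework-supported scene modes
*   @camera_id         : camera Id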
10460 *
10461 *
10462 *==========================================================================*/
10463void QCamera3HardwareInterface::makeOverridesList(
10464 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10465 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10466{
10467 /* The daemon gives a list of overrides for all scene modes.
10468 However, we should send the framework only the overrides for the
10469 scene modes it supports. */
10470 size_t j = 0;
10471 if (size > max_size) {
10472 size = max_size;
10473 }
10474 size_t focus_count = CAM_FOCUS_MODE_MAX;
10475 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10476 focus_count);
10477 for (size_t i = 0; i < size; i++) {
10478 bool supt = false;
10479 size_t index = supported_indexes[i];
10480 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10481 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10482 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10483 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10484 overridesTable[index].awb_mode);
10485 if (NAME_NOT_FOUND != val) {
10486 overridesList[j+1] = (uint8_t)val;
10487 }
10488 uint8_t focus_override = overridesTable[index].af_mode;
10489 for (size_t k = 0; k < focus_count; k++) {
10490 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10491 supt = true;
10492 break;
10493 }
10494 }
10495 if (supt) {
10496 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10497 focus_override);
10498 if (NAME_NOT_FOUND != val) {
10499 overridesList[j+2] = (uint8_t)val;
10500 }
10501 } else {
10502 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10503 }
10504 j+=3;
10505 }
10506}
10507
10508/*===========================================================================
10509 * FUNCTION : filterJpegSizes
10510 *
 * DESCRIPTION: Filter the processed sizes down to the JPEG sizes that are no
 *              smaller than the active array dimensions divided by the given
 *              downscale factor
 *
 * PARAMETERS :
 *   @jpegSizes         : output array of interleaved width/height values
 *   @processedSizes    : input array of interleaved width/height values
 *   @processedSizesCnt : number of int32 entries in processedSizes
 *   @maxCount          : maximum number of int32 entries to consider
 *   @active_array_size : sensor active array dimensions
 *   @downscale_factor  : maximum downscale factor supported for JPEG
 *
 * RETURN : number of int32 entries written to the jpegSizes array
10517 *==========================================================================*/
10518
10519size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10520 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10521 uint8_t downscale_factor)
10522{
10523 if (0 == downscale_factor) {
10524 downscale_factor = 1;
10525 }
10526
10527 int32_t min_width = active_array_size.width / downscale_factor;
10528 int32_t min_height = active_array_size.height / downscale_factor;
10529 size_t jpegSizesCnt = 0;
10530 if (processedSizesCnt > maxCount) {
10531 processedSizesCnt = maxCount;
10532 }
10533 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10534 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10535 jpegSizes[jpegSizesCnt] = processedSizes[i];
10536 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10537 jpegSizesCnt += 2;
10538 }
10539 }
10540 return jpegSizesCnt;
10541}
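/* Worked example (hypothetical values): with a 4032x3024 active array and
 * downscale_factor 4, min_width/min_height become 1008x756, so
 *
 *   int32_t processed[6] = {4032, 3024, 1920, 1080, 640, 480};
 *   int32_t jpeg[6];
 *   size_t cnt = filterJpegSizes(jpeg, processed, 6, 6, active_array, 4);
 *   // keeps 4032x3024 and 1920x1080, drops 640x480 -> cnt == 4
 */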
10542
10543/*===========================================================================
10544 * FUNCTION : computeNoiseModelEntryS
10545 *
10546 * DESCRIPTION: function to map a given sensitivity to the S noise
10547 * model parameters in the DNG noise model.
10548 *
10549 * PARAMETERS : sens : the sensor sensitivity
10550 *
 * RETURN     : S (sensor amplification) noise
10552 *
10553 *==========================================================================*/
10554double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10555 double s = gCamCapability[mCameraId]->gradient_S * sens +
10556 gCamCapability[mCameraId]->offset_S;
10557 return ((s < 0.0) ? 0.0 : s);
10558}
10559
10560/*===========================================================================
10561 * FUNCTION : computeNoiseModelEntryO
10562 *
10563 * DESCRIPTION: function to map a given sensitivity to the O noise
10564 * model parameters in the DNG noise model.
10565 *
10566 * PARAMETERS : sens : the sensor sensitivity
10567 *
 * RETURN     : O (sensor readout) noise
10569 *
10570 *==========================================================================*/
10571double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10572 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10573 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10574 1.0 : (1.0 * sens / max_analog_sens);
10575 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10576 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10577 return ((o < 0.0) ? 0.0 : o);
10578}
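/* For reference: the S and O entries computed here feed the DNG noise model
 * published via ANDROID_SENSOR_NOISE_PROFILE, where for a normalized pixel
 * value x the expected noise standard deviation is modeled as
 *
 *     N(x) = sqrt(S * x + O)
 *
 * with S from computeNoiseModelEntryS() and O from computeNoiseModelEntryO().
 */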
10579
10580/*===========================================================================
10581 * FUNCTION : getSensorSensitivity
10582 *
10583 * DESCRIPTION: convert iso_mode to an integer value
10584 *
10585 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10586 *
 * RETURN     : sensitivity supported by sensor
10588 *
10589 *==========================================================================*/
10590int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10591{
10592 int32_t sensitivity;
10593
10594 switch (iso_mode) {
10595 case CAM_ISO_MODE_100:
10596 sensitivity = 100;
10597 break;
10598 case CAM_ISO_MODE_200:
10599 sensitivity = 200;
10600 break;
10601 case CAM_ISO_MODE_400:
10602 sensitivity = 400;
10603 break;
10604 case CAM_ISO_MODE_800:
10605 sensitivity = 800;
10606 break;
10607 case CAM_ISO_MODE_1600:
10608 sensitivity = 1600;
10609 break;
10610 default:
10611 sensitivity = -1;
10612 break;
10613 }
10614 return sensitivity;
10615}
10616
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010617int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010618 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010619 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10620 // to connect to Easel.
10621 bool doNotpowerOnEasel =
10622 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10623
10624 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010625 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10626 return OK;
10627 }
10628
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010629 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010630 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010631 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010632 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010633 return res;
10634 }
10635
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010636 EaselManagerClientOpened = true;
10637
10638 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010639 if (res != OK) {
10640 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10641 }
10642
10643 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010644 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010645
10646 // Expose enableZsl key only when HDR+ mode is enabled.
10647 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010648 }
10649
10650 return OK;
10651}
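/* Illustrative sketch of the properties read above (property names come from
 * this function; the values shown are examples only):
 *
 *   adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel off for HDR+ tests
 *   adb shell setprop persist.camera.hdrplus.enable 1      # enable HDR+ (not bypass-only)
 *   adb shell setprop persist.camera.hdrplus.profiling 1   # enable HDR+ profiling
 */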
10652
Thierry Strudel3d639192016-09-09 11:52:26 -070010653/*===========================================================================
10654 * FUNCTION : getCamInfo
10655 *
10656 * DESCRIPTION: query camera capabilities
10657 *
10658 * PARAMETERS :
10659 * @cameraId : camera Id
10660 * @info : camera info struct to be filled in with camera capabilities
10661 *
10662 * RETURN : int type of status
10663 * NO_ERROR -- success
 *              non-zero failure code
10665 *==========================================================================*/
10666int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10667 struct camera_info *info)
10668{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010669 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010670 int rc = 0;
10671
10672 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010673
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010674 {
10675 Mutex::Autolock l(gHdrPlusClientLock);
10676 rc = initHdrPlusClientLocked();
10677 if (rc != OK) {
10678 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10679 pthread_mutex_unlock(&gCamLock);
10680 return rc;
10681 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010682 }
10683
Thierry Strudel3d639192016-09-09 11:52:26 -070010684 if (NULL == gCamCapability[cameraId]) {
10685 rc = initCapabilities(cameraId);
10686 if (rc < 0) {
10687 pthread_mutex_unlock(&gCamLock);
10688 return rc;
10689 }
10690 }
10691
10692 if (NULL == gStaticMetadata[cameraId]) {
10693 rc = initStaticMetadata(cameraId);
10694 if (rc < 0) {
10695 pthread_mutex_unlock(&gCamLock);
10696 return rc;
10697 }
10698 }
10699
10700 switch(gCamCapability[cameraId]->position) {
10701 case CAM_POSITION_BACK:
10702 case CAM_POSITION_BACK_AUX:
10703 info->facing = CAMERA_FACING_BACK;
10704 break;
10705
10706 case CAM_POSITION_FRONT:
10707 case CAM_POSITION_FRONT_AUX:
10708 info->facing = CAMERA_FACING_FRONT;
10709 break;
10710
10711 default:
10712 LOGE("Unknown position type %d for camera id:%d",
10713 gCamCapability[cameraId]->position, cameraId);
10714 rc = -1;
10715 break;
10716 }
10717
10718
10719 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010720#ifndef USE_HAL_3_3
10721 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10722#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010723 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010724#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010725 info->static_camera_characteristics = gStaticMetadata[cameraId];
10726
10727 //For now assume both cameras can operate independently.
10728 info->conflicting_devices = NULL;
10729 info->conflicting_devices_length = 0;
10730
10731 //resource cost is 100 * MIN(1.0, m/M),
10732 //where m is throughput requirement with maximum stream configuration
10733 //and M is CPP maximum throughput.
10734 float max_fps = 0.0;
10735 for (uint32_t i = 0;
10736 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10737 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10738 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10739 }
10740 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10741 gCamCapability[cameraId]->active_array_size.width *
10742 gCamCapability[cameraId]->active_array_size.height * max_fps /
10743 gCamCapability[cameraId]->max_pixel_bandwidth;
10744 info->resource_cost = 100 * MIN(1.0, ratio);
10745 LOGI("camera %d resource cost is %d", cameraId,
10746 info->resource_cost);
10747
10748 pthread_mutex_unlock(&gCamLock);
10749 return rc;
10750}
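/* Illustrative arithmetic for the resource cost computed in getCamInfo()
 * above (all numbers are hypothetical): with a 4032x3024 active array,
 * max_fps = 30, MAX_PROCESSED_STREAMS = 3 and a max_pixel_bandwidth of
 * 1.2e9 pixels/s:
 *
 *   ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91
 *   resource_cost = 100 * MIN(1.0, 0.91) = 91
 */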
10751
10752/*===========================================================================
10753 * FUNCTION : translateCapabilityToMetadata
10754 *
10755 * DESCRIPTION: translate the capability into camera_metadata_t
10756 *
 * PARAMETERS : @type : the capture request template type (CAMERA3_TEMPLATE_*)
10758 *
10759 *
10760 * RETURN : success: camera_metadata_t*
10761 * failure: NULL
10762 *
10763 *==========================================================================*/
10764camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10765{
10766 if (mDefaultMetadata[type] != NULL) {
10767 return mDefaultMetadata[type];
10768 }
10769 //first time we are handling this request
10770 //fill up the metadata structure using the wrapper class
10771 CameraMetadata settings;
10772 //translate from cam_capability_t to camera_metadata_tag_t
10773 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10774 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10775 int32_t defaultRequestID = 0;
10776 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10777
10778 /* OIS disable */
10779 char ois_prop[PROPERTY_VALUE_MAX];
10780 memset(ois_prop, 0, sizeof(ois_prop));
10781 property_get("persist.camera.ois.disable", ois_prop, "0");
10782 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10783
10784 /* Force video to use OIS */
10785 char videoOisProp[PROPERTY_VALUE_MAX];
10786 memset(videoOisProp, 0, sizeof(videoOisProp));
10787 property_get("persist.camera.ois.video", videoOisProp, "1");
10788 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010789
10790 // Hybrid AE enable/disable
10791 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10792 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10793 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10794 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10795
Thierry Strudel3d639192016-09-09 11:52:26 -070010796 uint8_t controlIntent = 0;
10797 uint8_t focusMode;
10798 uint8_t vsMode;
10799 uint8_t optStabMode;
10800 uint8_t cacMode;
10801 uint8_t edge_mode;
10802 uint8_t noise_red_mode;
10803 uint8_t tonemap_mode;
10804 bool highQualityModeEntryAvailable = FALSE;
10805 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010806 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010807 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10808 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010809 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010810 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010811 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010812
Thierry Strudel3d639192016-09-09 11:52:26 -070010813 switch (type) {
10814 case CAMERA3_TEMPLATE_PREVIEW:
10815 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10816 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10817 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10818 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10819 edge_mode = ANDROID_EDGE_MODE_FAST;
10820 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10821 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10822 break;
10823 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10824 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10825 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10826 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10827 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10828 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10829 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10830 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10831 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10832 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10833 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10834 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10835 highQualityModeEntryAvailable = TRUE;
10836 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10837 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10838 fastModeEntryAvailable = TRUE;
10839 }
10840 }
10841 if (highQualityModeEntryAvailable) {
10842 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10843 } else if (fastModeEntryAvailable) {
10844 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10845 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010846 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10847 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10848 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010849 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010850 break;
10851 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10852 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10853 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10854 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010855 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10856 edge_mode = ANDROID_EDGE_MODE_FAST;
10857 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10858 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10859 if (forceVideoOis)
10860 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10861 break;
10862 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10863 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10864 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10865 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010866 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10867 edge_mode = ANDROID_EDGE_MODE_FAST;
10868 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10869 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10870 if (forceVideoOis)
10871 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10872 break;
10873 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10874 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10875 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10876 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10877 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10878 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10879 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10880 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10881 break;
10882 case CAMERA3_TEMPLATE_MANUAL:
10883 edge_mode = ANDROID_EDGE_MODE_FAST;
10884 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10885 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10886 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10887 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10888 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10889 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10890 break;
10891 default:
10892 edge_mode = ANDROID_EDGE_MODE_FAST;
10893 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10894 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10895 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10896 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10897 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10898 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10899 break;
10900 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010901 // Set CAC to OFF if underlying device doesn't support
10902 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10903 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10904 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010905 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10906 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10907 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10908 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10909 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10910 }
10911 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010912 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010913 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010914
10915 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10916 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10917 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10918 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10919 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10920 || ois_disable)
10921 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10922 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010923 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010924
10925 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10926 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10927
10928 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10929 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10930
10931 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10932 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10933
10934 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10935 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10936
10937 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10938 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10939
10940 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10941 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10942
10943 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10944 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10945
10946 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10947 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10948
10949 /*flash*/
10950 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10951 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10952
10953 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10954 settings.update(ANDROID_FLASH_FIRING_POWER,
10955 &flashFiringLevel, 1);
10956
10957 /* lens */
10958 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10959 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10960
10961 if (gCamCapability[mCameraId]->filter_densities_count) {
10962 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10963 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10964 gCamCapability[mCameraId]->filter_densities_count);
10965 }
10966
10967 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10968 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10969
Thierry Strudel3d639192016-09-09 11:52:26 -070010970 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10971 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10972
10973 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10974 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10975
10976 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10977 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10978
10979 /* face detection (default to OFF) */
10980 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10981 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10982
Thierry Strudel54dc9782017-02-15 12:12:10 -080010983 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10984 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010985
10986 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10987 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10988
10989 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10990 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10991
Thierry Strudel3d639192016-09-09 11:52:26 -070010992
10993 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10994 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
10995
10996 /* Exposure time(Update the Min Exposure Time)*/
10997 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
10998 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
10999
11000 /* frame duration */
11001 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11002 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11003
11004 /* sensitivity */
11005 static const int32_t default_sensitivity = 100;
11006 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011007#ifndef USE_HAL_3_3
11008 static const int32_t default_isp_sensitivity =
11009 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11010 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11011#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011012
11013 /*edge mode*/
11014 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11015
11016 /*noise reduction mode*/
11017 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11018
11019 /*color correction mode*/
11020 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11021 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11022
    /*tonemap mode*/
11024 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11025
11026 int32_t scaler_crop_region[4];
11027 scaler_crop_region[0] = 0;
11028 scaler_crop_region[1] = 0;
11029 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11030 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11031 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11032
11033 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11034 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11035
11036 /*focus distance*/
11037 float focus_distance = 0.0;
11038 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11039
11040 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011041 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011042 float max_range = 0.0;
11043 float max_fixed_fps = 0.0;
11044 int32_t fps_range[2] = {0, 0};
11045 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11046 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011047 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11048 TEMPLATE_MAX_PREVIEW_FPS) {
11049 continue;
11050 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011051 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11052 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11053 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11054 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11055 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11056 if (range > max_range) {
11057 fps_range[0] =
11058 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11059 fps_range[1] =
11060 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11061 max_range = range;
11062 }
11063 } else {
11064 if (range < 0.01 && max_fixed_fps <
11065 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11066 fps_range[0] =
11067 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11068 fps_range[1] =
11069 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11070 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11071 }
11072 }
11073 }
11074 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
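    /* Worked example (hypothetical fps table): given ranges {[15,30], [30,30],
     * [30,60]} and a TEMPLATE_MAX_PREVIEW_FPS of 30, the [30,60] entry is
     * skipped; preview/still/ZSL templates pick the widest remaining range
     * [15,30], while the video templates pick the highest fixed range
     * [30,30]. */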
11075
11076 /*precapture trigger*/
11077 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11078 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11079
11080 /*af trigger*/
11081 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11082 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11083
11084 /* ae & af regions */
11085 int32_t active_region[] = {
11086 gCamCapability[mCameraId]->active_array_size.left,
11087 gCamCapability[mCameraId]->active_array_size.top,
11088 gCamCapability[mCameraId]->active_array_size.left +
11089 gCamCapability[mCameraId]->active_array_size.width,
11090 gCamCapability[mCameraId]->active_array_size.top +
11091 gCamCapability[mCameraId]->active_array_size.height,
11092 0};
11093 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11094 sizeof(active_region) / sizeof(active_region[0]));
11095 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11096 sizeof(active_region) / sizeof(active_region[0]));
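    /* For reference: each region entry uses the framework's 5-element layout
     * {xmin, ymin, xmax, ymax, weight}; the default region above covers the
     * full active array with weight 0. */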
11097
11098 /* black level lock */
11099 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11100 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11101
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 //special defaults for manual template
11103 if (type == CAMERA3_TEMPLATE_MANUAL) {
11104 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11105 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11106
11107 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11108 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11109
11110 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11111 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11112
11113 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11114 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11115
11116 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11117 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11118
11119 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11120 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11121 }
11122
11123
    /* TNR
     * This is where we decide for which templates TNR is enabled.
     * TNR is enabled if either the preview or the video stream requires it.
     * This is not to be confused with per-stream linking; that decision is
     * still made per session and is handled as part of stream configuration.
     */
11130 uint8_t tnr_enable = 0;
11131
11132 if (m_bTnrPreview || m_bTnrVideo) {
11133
11134 switch (type) {
11135 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11136 tnr_enable = 1;
11137 break;
11138
11139 default:
11140 tnr_enable = 0;
11141 break;
11142 }
11143
11144 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11145 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11146 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11147
11148 LOGD("TNR:%d with process plate %d for template:%d",
11149 tnr_enable, tnr_process_type, type);
11150 }
11151
11152 //Update Link tags to default
11153 int32_t sync_type = CAM_TYPE_STANDALONE;
11154 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11155
11156 int32_t is_main = 0; //this doesn't matter as app should overwrite
11157 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11158
11159 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11160
11161 /* CDS default */
11162 char prop[PROPERTY_VALUE_MAX];
11163 memset(prop, 0, sizeof(prop));
11164 property_get("persist.camera.CDS", prop, "Auto");
11165 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11166 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11167 if (CAM_CDS_MODE_MAX == cds_mode) {
11168 cds_mode = CAM_CDS_MODE_AUTO;
11169 }
11170
11171 /* Disabling CDS in templates which have TNR enabled*/
11172 if (tnr_enable)
11173 cds_mode = CAM_CDS_MODE_OFF;
11174
11175 int32_t mode = cds_mode;
11176 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011177
Thierry Strudel269c81a2016-10-12 12:13:59 -070011178 /* Manual Convergence AEC Speed is disabled by default*/
11179 float default_aec_speed = 0;
11180 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11181
11182 /* Manual Convergence AWB Speed is disabled by default*/
11183 float default_awb_speed = 0;
11184 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11185
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011186 // Set instant AEC to normal convergence by default
11187 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11188 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11189
Shuzhen Wang19463d72016-03-08 11:09:52 -080011190 /* hybrid ae */
11191 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11192
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011193 if (gExposeEnableZslKey) {
11194 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11195 }
11196
Thierry Strudel3d639192016-09-09 11:52:26 -070011197 mDefaultMetadata[type] = settings.release();
11198
11199 return mDefaultMetadata[type];
11200}
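/* Minimal usage sketch (assuming the standard camera3 entry points): the
 * framework reaches this translation through the HAL's
 * construct_default_request_settings() hook, e.g.
 *
 *   const camera_metadata_t *defaults =
 *       device->ops->construct_default_request_settings(device,
 *               CAMERA3_TEMPLATE_PREVIEW);
 *
 * and the returned buffer remains owned by the HAL (mDefaultMetadata). */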
11201
11202/*===========================================================================
11203 * FUNCTION : setFrameParameters
11204 *
11205 * DESCRIPTION: set parameters per frame as requested in the metadata from
11206 * framework
11207 *
11208 * PARAMETERS :
11209 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011210 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011211 * @blob_request: Whether this request is a blob request or not
11212 *
11213 * RETURN : success: NO_ERROR
11214 * failure:
11215 *==========================================================================*/
11216int QCamera3HardwareInterface::setFrameParameters(
11217 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011218 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011219 int blob_request,
11220 uint32_t snapshotStreamId)
11221{
11222 /*translate from camera_metadata_t type to parm_type_t*/
11223 int rc = 0;
11224 int32_t hal_version = CAM_HAL_V3;
11225
11226 clear_metadata_buffer(mParameters);
11227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11228 LOGE("Failed to set hal version in the parameters");
11229 return BAD_VALUE;
11230 }
11231
11232 /*we need to update the frame number in the parameters*/
11233 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11234 request->frame_number)) {
11235 LOGE("Failed to set the frame number in the parameters");
11236 return BAD_VALUE;
11237 }
11238
11239 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011241 LOGE("Failed to set stream type mask in the parameters");
11242 return BAD_VALUE;
11243 }
11244
11245 if (mUpdateDebugLevel) {
11246 uint32_t dummyDebugLevel = 0;
        /* The value of dummyDebugLevel is irrelevant. On
         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read. */
11249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11250 dummyDebugLevel)) {
11251 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11252 return BAD_VALUE;
11253 }
11254 mUpdateDebugLevel = false;
11255 }
11256
11257 if(request->settings != NULL){
11258 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11259 if (blob_request)
11260 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11261 }
11262
11263 return rc;
11264}
11265
11266/*===========================================================================
11267 * FUNCTION : setReprocParameters
11268 *
11269 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11270 * return it.
11271 *
11272 * PARAMETERS :
11273 * @request : request that needs to be serviced
11274 *
11275 * RETURN : success: NO_ERROR
11276 * failure:
11277 *==========================================================================*/
11278int32_t QCamera3HardwareInterface::setReprocParameters(
11279 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11280 uint32_t snapshotStreamId)
11281{
11282 /*translate from camera_metadata_t type to parm_type_t*/
11283 int rc = 0;
11284
11285 if (NULL == request->settings){
11286 LOGE("Reprocess settings cannot be NULL");
11287 return BAD_VALUE;
11288 }
11289
11290 if (NULL == reprocParam) {
11291 LOGE("Invalid reprocessing metadata buffer");
11292 return BAD_VALUE;
11293 }
11294 clear_metadata_buffer(reprocParam);
11295
11296 /*we need to update the frame number in the parameters*/
11297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11298 request->frame_number)) {
11299 LOGE("Failed to set the frame number in the parameters");
11300 return BAD_VALUE;
11301 }
11302
11303 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11304 if (rc < 0) {
11305 LOGE("Failed to translate reproc request");
11306 return rc;
11307 }
11308
11309 CameraMetadata frame_settings;
11310 frame_settings = request->settings;
11311 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11312 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11313 int32_t *crop_count =
11314 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11315 int32_t *crop_data =
11316 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11317 int32_t *roi_map =
11318 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11319 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11320 cam_crop_data_t crop_meta;
11321 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11322 crop_meta.num_of_streams = 1;
11323 crop_meta.crop_info[0].crop.left = crop_data[0];
11324 crop_meta.crop_info[0].crop.top = crop_data[1];
11325 crop_meta.crop_info[0].crop.width = crop_data[2];
11326 crop_meta.crop_info[0].crop.height = crop_data[3];
11327
11328 crop_meta.crop_info[0].roi_map.left =
11329 roi_map[0];
11330 crop_meta.crop_info[0].roi_map.top =
11331 roi_map[1];
11332 crop_meta.crop_info[0].roi_map.width =
11333 roi_map[2];
11334 crop_meta.crop_info[0].roi_map.height =
11335 roi_map[3];
11336
11337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11338 rc = BAD_VALUE;
11339 }
11340 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11341 request->input_buffer->stream,
11342 crop_meta.crop_info[0].crop.left,
11343 crop_meta.crop_info[0].crop.top,
11344 crop_meta.crop_info[0].crop.width,
11345 crop_meta.crop_info[0].crop.height);
11346 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11347 request->input_buffer->stream,
11348 crop_meta.crop_info[0].roi_map.left,
11349 crop_meta.crop_info[0].roi_map.top,
11350 crop_meta.crop_info[0].roi_map.width,
11351 crop_meta.crop_info[0].roi_map.height);
11352 } else {
11353 LOGE("Invalid reprocess crop count %d!", *crop_count);
11354 }
11355 } else {
11356 LOGE("No crop data from matching output stream");
11357 }
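    /* Illustrative layout (hypothetical values) of the reprocess crop vendor
     * tags consumed above, as supplied by the application for one stream:
     *   QCAMERA3_CROP_COUNT_REPROCESS   = {1}
     *   QCAMERA3_CROP_REPROCESS         = {left, top, width, height}
     *   QCAMERA3_CROP_ROI_MAP_REPROCESS = {left, top, width, height}
     */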
11358
11359 /* These settings are not needed for regular requests so handle them specially for
11360 reprocess requests; information needed for EXIF tags */
11361 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11362 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11363 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11364 if (NAME_NOT_FOUND != val) {
11365 uint32_t flashMode = (uint32_t)val;
11366 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11367 rc = BAD_VALUE;
11368 }
11369 } else {
11370 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11371 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11372 }
11373 } else {
11374 LOGH("No flash mode in reprocess settings");
11375 }
11376
11377 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11378 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11379 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11380 rc = BAD_VALUE;
11381 }
11382 } else {
11383 LOGH("No flash state in reprocess settings");
11384 }
11385
11386 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11387 uint8_t *reprocessFlags =
11388 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11389 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11390 *reprocessFlags)) {
11391 rc = BAD_VALUE;
11392 }
11393 }
11394
Thierry Strudel54dc9782017-02-15 12:12:10 -080011395 // Add exif debug data to internal metadata
11396 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11397 mm_jpeg_debug_exif_params_t *debug_params =
11398 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11399 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11400 // AE
11401 if (debug_params->ae_debug_params_valid == TRUE) {
11402 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11403 debug_params->ae_debug_params);
11404 }
11405 // AWB
11406 if (debug_params->awb_debug_params_valid == TRUE) {
11407 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11408 debug_params->awb_debug_params);
11409 }
11410 // AF
11411 if (debug_params->af_debug_params_valid == TRUE) {
11412 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11413 debug_params->af_debug_params);
11414 }
11415 // ASD
11416 if (debug_params->asd_debug_params_valid == TRUE) {
11417 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11418 debug_params->asd_debug_params);
11419 }
11420 // Stats
11421 if (debug_params->stats_debug_params_valid == TRUE) {
11422 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11423 debug_params->stats_debug_params);
11424 }
11425 // BE Stats
11426 if (debug_params->bestats_debug_params_valid == TRUE) {
11427 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11428 debug_params->bestats_debug_params);
11429 }
11430 // BHIST
11431 if (debug_params->bhist_debug_params_valid == TRUE) {
11432 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11433 debug_params->bhist_debug_params);
11434 }
11435 // 3A Tuning
11436 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11437 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11438 debug_params->q3a_tuning_debug_params);
11439 }
11440 }
11441
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011442 // Add metadata which reprocess needs
11443 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11444 cam_reprocess_info_t *repro_info =
11445 (cam_reprocess_info_t *)frame_settings.find
11446 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011447 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011448 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011449 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011450 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011451 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011452 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011453 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011454 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011455 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011456 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011457 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011458 repro_info->pipeline_flip);
11459 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11460 repro_info->af_roi);
11461 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11462 repro_info->dyn_mask);
        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, the
           CAM_INTF_PARM_ROTATION metadata has already been added in
           translateToHalMetadata and HAL needs to keep that new rotation
           metadata. Otherwise, the old rotation info saved in the vendor tag
           is used. */
11468 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11469 CAM_INTF_PARM_ROTATION, reprocParam) {
11470 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11471 } else {
11472 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011473 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011474 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011475 }
11476
    /* Add additional JPEG cropping information. The app adds
       QCAMERA3_JPEG_ENCODE_CROP_RECT to request cropping and uses the ROI
       (QCAMERA3_JPEG_ENCODE_CROP_ROI) for downscale/upscale during HW JPEG
       encoding; roi.width and roi.height are the final JPEG size. For now,
       HAL only checks this for reprocess requests. */
11481 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11482 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11483 uint8_t *enable =
11484 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11485 if (*enable == TRUE) {
11486 int32_t *crop_data =
11487 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11488 cam_stream_crop_info_t crop_meta;
11489 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11490 crop_meta.stream_id = 0;
11491 crop_meta.crop.left = crop_data[0];
11492 crop_meta.crop.top = crop_data[1];
11493 crop_meta.crop.width = crop_data[2];
11494 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011495 // The JPEG crop roi should match cpp output size
11496 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11497 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11498 crop_meta.roi_map.left = 0;
11499 crop_meta.roi_map.top = 0;
11500 crop_meta.roi_map.width = cpp_crop->crop.width;
11501 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011502 }
11503 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11504 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011505 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011506 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011507 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11508 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011510 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11511
11512 // Add JPEG scale information
11513 cam_dimension_t scale_dim;
11514 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11515 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11516 int32_t *roi =
11517 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11518 scale_dim.width = roi[2];
11519 scale_dim.height = roi[3];
11520 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11521 scale_dim);
11522 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11523 scale_dim.width, scale_dim.height, mCameraId);
11524 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011525 }
11526 }
11527
11528 return rc;
11529}
11530
11531/*===========================================================================
11532 * FUNCTION : saveRequestSettings
11533 *
11534 * DESCRIPTION: Add any settings that might have changed to the request settings
11535 * and save the settings to be applied on the frame
11536 *
11537 * PARAMETERS :
11538 * @jpegMetadata : the extracted and/or modified jpeg metadata
11539 * @request : request with initial settings
11540 *
11541 * RETURN :
11542 * camera_metadata_t* : pointer to the saved request settings
11543 *==========================================================================*/
11544camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11545 const CameraMetadata &jpegMetadata,
11546 camera3_capture_request_t *request)
11547{
11548 camera_metadata_t *resultMetadata;
11549 CameraMetadata camMetadata;
11550 camMetadata = request->settings;
11551
11552 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11553 int32_t thumbnail_size[2];
11554 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11555 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11556 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11557 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11558 }
11559
11560 if (request->input_buffer != NULL) {
11561 uint8_t reprocessFlags = 1;
11562 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11563 (uint8_t*)&reprocessFlags,
11564 sizeof(reprocessFlags));
11565 }
11566
11567 resultMetadata = camMetadata.release();
11568 return resultMetadata;
11569}
11570
11571/*===========================================================================
11572 * FUNCTION : setHalFpsRange
11573 *
11574 * DESCRIPTION: set FPS range parameter
11575 *
11576 *
11577 * PARAMETERS :
11578 * @settings : Metadata from framework
11579 * @hal_metadata: Metadata buffer
11580 *
11581 *
11582 * RETURN : success: NO_ERROR
11583 * failure:
11584 *==========================================================================*/
11585int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11586 metadata_buffer_t *hal_metadata)
11587{
11588 int32_t rc = NO_ERROR;
11589 cam_fps_range_t fps_range;
11590 fps_range.min_fps = (float)
11591 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11592 fps_range.max_fps = (float)
11593 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11594 fps_range.video_min_fps = fps_range.min_fps;
11595 fps_range.video_max_fps = fps_range.max_fps;
11596
11597 LOGD("aeTargetFpsRange fps: [%f %f]",
11598 fps_range.min_fps, fps_range.max_fps);
11599 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11600 * follows:
11601 * ---------------------------------------------------------------|
11602 * Video stream is absent in configure_streams |
11603 * (Camcorder preview before the first video record |
11604 * ---------------------------------------------------------------|
11605 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11606 * | | | vid_min/max_fps|
11607 * ---------------------------------------------------------------|
11608 * NO | [ 30, 240] | 240 | [240, 240] |
11609 * |-------------|-------------|----------------|
11610 * | [240, 240] | 240 | [240, 240] |
11611 * ---------------------------------------------------------------|
11612 * Video stream is present in configure_streams |
11613 * ---------------------------------------------------------------|
11614 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11615 * | | | vid_min/max_fps|
11616 * ---------------------------------------------------------------|
11617 * NO | [ 30, 240] | 240 | [240, 240] |
11618 * (camcorder prev |-------------|-------------|----------------|
11619 * after video rec | [240, 240] | 240 | [240, 240] |
11620 * is stopped) | | | |
11621 * ---------------------------------------------------------------|
11622 * YES | [ 30, 240] | 240 | [240, 240] |
11623 * |-------------|-------------|----------------|
11624 * | [240, 240] | 240 | [240, 240] |
11625 * ---------------------------------------------------------------|
11626 * When Video stream is absent in configure_streams,
11627 * preview fps = sensor_fps / batchsize
11628 * Eg: for 240fps at batchSize 4, preview = 60fps
11629 * for 120fps at batchSize 4, preview = 30fps
11630 *
11631 * When video stream is present in configure_streams, preview fps is as per
11632 * the ratio of preview buffers to video buffers requested in process
11633 * capture request
11634 */
11635 mBatchSize = 0;
11636 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11637 fps_range.min_fps = fps_range.video_max_fps;
11638 fps_range.video_min_fps = fps_range.video_max_fps;
11639 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11640 fps_range.max_fps);
11641 if (NAME_NOT_FOUND != val) {
11642 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11644 return BAD_VALUE;
11645 }
11646
11647 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11648 /* If batchmode is currently in progress and the fps changes,
11649 * set the flag to restart the sensor */
11650 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11651 (mHFRVideoFps != fps_range.max_fps)) {
11652 mNeedSensorRestart = true;
11653 }
11654 mHFRVideoFps = fps_range.max_fps;
11655 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11656 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11657 mBatchSize = MAX_HFR_BATCH_SIZE;
11658 }
11659 }
11660 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11661
11662 }
11663 } else {
11664 /* HFR mode is session param in backend/ISP. This should be reset when
11665 * in non-HFR mode */
11666 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11668 return BAD_VALUE;
11669 }
11670 }
11671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11672 return BAD_VALUE;
11673 }
11674 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11675 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11676 return rc;
11677}
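/* Worked example (hypothetical request): in constrained high speed mode with
 * aeTargetFpsRange = [30, 240], the logic above collapses the sensor range to
 * fps_range = {min: 240, max: 240, video_min: 240, video_max: 240}, selects
 * the matching CAM_INTF_PARM_HFR mode, and derives the preview batch size
 * from mHFRVideoFps / PREVIEW_FPS_FOR_HFR, capped at MAX_HFR_BATCH_SIZE. */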
11678
11679/*===========================================================================
11680 * FUNCTION : translateToHalMetadata
11681 *
11682 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11683 *
11684 *
11685 * PARAMETERS :
11686 * @request : request sent from framework
11687 *
11688 *
11689 * RETURN : success: NO_ERROR
11690 * failure:
11691 *==========================================================================*/
11692int QCamera3HardwareInterface::translateToHalMetadata
11693 (const camera3_capture_request_t *request,
11694 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011695 uint32_t snapshotStreamId) {
11696 if (request == nullptr || hal_metadata == nullptr) {
11697 return BAD_VALUE;
11698 }
11699
11700 int64_t minFrameDuration = getMinFrameDuration(request);
11701
11702 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11703 minFrameDuration);
11704}
11705
11706int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11707 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11708 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11709
Thierry Strudel3d639192016-09-09 11:52:26 -070011710 int rc = 0;
11711 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011712 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011713
11714 /* Do not change the order of the following list unless you know what you are
11715 * doing.
11716 * The order is laid out in such a way that parameters in the front of the table
11717 * may be used to override the parameters later in the table. Examples are:
11718 * 1. META_MODE should precede AEC/AWB/AF MODE
     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
     * 4. Any mode should precede its corresponding settings
11722 */
11723 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11724 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11726 rc = BAD_VALUE;
11727 }
11728 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11729 if (rc != NO_ERROR) {
11730 LOGE("extractSceneMode failed");
11731 }
11732 }
11733
11734 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11735 uint8_t fwk_aeMode =
11736 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11737 uint8_t aeMode;
11738 int32_t redeye;
11739
11740 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11741 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011742 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11743 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011744 } else {
11745 aeMode = CAM_AE_MODE_ON;
11746 }
11747 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11748 redeye = 1;
11749 } else {
11750 redeye = 0;
11751 }
11752
11753 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11754 fwk_aeMode);
11755 if (NAME_NOT_FOUND != val) {
11756 int32_t flashMode = (int32_t)val;
11757 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11758 }
11759
11760 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11762 rc = BAD_VALUE;
11763 }
11764 }
11765
11766 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11767 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11768 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11769 fwk_whiteLevel);
11770 if (NAME_NOT_FOUND != val) {
11771 uint8_t whiteLevel = (uint8_t)val;
11772 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11773 rc = BAD_VALUE;
11774 }
11775 }
11776 }
11777
11778 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11779 uint8_t fwk_cacMode =
11780 frame_settings.find(
11781 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11782 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11783 fwk_cacMode);
11784 if (NAME_NOT_FOUND != val) {
11785 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11786 bool entryAvailable = FALSE;
11787             // Check whether the framework-requested CAC mode is supported by the device
11788 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11789 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11790 entryAvailable = TRUE;
11791 break;
11792 }
11793 }
11794 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11795             // If the mode is not supported, fall back to a device-supported mode instead of the framework mode, i.e.:
11796             // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH_QUALITY behaving the same as FAST in the ISP
11797             // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST behaving the same as OFF
11798 if (entryAvailable == FALSE) {
11799 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11800 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11801 } else {
11802 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11803                     // HIGH_QUALITY is not supported, so fall back to FAST; the spec says the
11804                     // underlying device implementation may be the same for both modes.
11805 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11806 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11807 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11808 // in order to avoid the fps drop due to high quality
11809 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11810 } else {
11811 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11812 }
11813 }
11814 }
11815 LOGD("Final cacMode is %d", cacMode);
11816 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11817 rc = BAD_VALUE;
11818 }
11819 } else {
11820 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11821 }
11822 }
11823
Thierry Strudel2896d122017-02-23 19:18:03 -080011824 char af_value[PROPERTY_VALUE_MAX];
11825 property_get("persist.camera.af.infinity", af_value, "0");
11826
Jason Lee84ae9972017-02-24 13:24:24 -080011827 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011828 if (atoi(af_value) == 0) {
11829 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011830 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011831 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11832 fwk_focusMode);
11833 if (NAME_NOT_FOUND != val) {
11834 uint8_t focusMode = (uint8_t)val;
11835 LOGD("set focus mode %d", focusMode);
11836 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11837 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11838 rc = BAD_VALUE;
11839 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011840 }
11841 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011842 } else {
11843 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11844 LOGE("Focus forced to infinity %d", focusMode);
11845 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11846 rc = BAD_VALUE;
11847 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011848 }
11849
Jason Lee84ae9972017-02-24 13:24:24 -080011850 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11851 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011852 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11853 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11854 focalDistance)) {
11855 rc = BAD_VALUE;
11856 }
11857 }
11858
11859 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11860 uint8_t fwk_antibandingMode =
11861 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11862 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11863 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11864 if (NAME_NOT_FOUND != val) {
11865 uint32_t hal_antibandingMode = (uint32_t)val;
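            // CAM_ANTIBANDING_MODE_AUTO is refined to the 50Hz or 60Hz auto variant based on the
            // region hint in m60HzZone.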
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011866 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11867 if (m60HzZone) {
11868 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11869 } else {
11870 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11871 }
11872 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011873 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11874 hal_antibandingMode)) {
11875 rc = BAD_VALUE;
11876 }
11877 }
11878 }
11879
11880 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11881 int32_t expCompensation = frame_settings.find(
11882 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11883 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11884 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11885 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11886 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011887 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11889 expCompensation)) {
11890 rc = BAD_VALUE;
11891 }
11892 }
11893
11894 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11895 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11896 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11897 rc = BAD_VALUE;
11898 }
11899 }
11900 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11901 rc = setHalFpsRange(frame_settings, hal_metadata);
11902 if (rc != NO_ERROR) {
11903 LOGE("setHalFpsRange failed");
11904 }
11905 }
11906
11907 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11908 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11910 rc = BAD_VALUE;
11911 }
11912 }
11913
11914 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11915 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11916 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11917 fwk_effectMode);
11918 if (NAME_NOT_FOUND != val) {
11919 uint8_t effectMode = (uint8_t)val;
11920 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11921 rc = BAD_VALUE;
11922 }
11923 }
11924 }
11925
11926 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11927 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11928 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11929 colorCorrectMode)) {
11930 rc = BAD_VALUE;
11931 }
11932 }
11933
11934 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11935 cam_color_correct_gains_t colorCorrectGains;
11936 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11937 colorCorrectGains.gains[i] =
11938 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11939 }
11940 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11941 colorCorrectGains)) {
11942 rc = BAD_VALUE;
11943 }
11944 }
11945
11946 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11947 cam_color_correct_matrix_t colorCorrectTransform;
11948 cam_rational_type_t transform_elem;
11949 size_t num = 0;
11950 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11951 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11952 transform_elem.numerator =
11953 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11954 transform_elem.denominator =
11955 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11956 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11957 num++;
11958 }
11959 }
11960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11961 colorCorrectTransform)) {
11962 rc = BAD_VALUE;
11963 }
11964 }
11965
11966 cam_trigger_t aecTrigger;
11967 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11968 aecTrigger.trigger_id = -1;
11969 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11970 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11971 aecTrigger.trigger =
11972 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11973 aecTrigger.trigger_id =
11974 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11975 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11976 aecTrigger)) {
11977 rc = BAD_VALUE;
11978 }
11979 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11980 aecTrigger.trigger, aecTrigger.trigger_id);
11981 }
11982
11983 /*af_trigger must come with a trigger id*/
11984 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11985 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11986 cam_trigger_t af_trigger;
11987 af_trigger.trigger =
11988 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11989 af_trigger.trigger_id =
11990 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11992 rc = BAD_VALUE;
11993 }
11994 LOGD("AfTrigger: %d AfTriggerID: %d",
11995 af_trigger.trigger, af_trigger.trigger_id);
11996 }
11997
11998 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
11999 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12000 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12001 rc = BAD_VALUE;
12002 }
12003 }
12004 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12005 cam_edge_application_t edge_application;
12006 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012007
Thierry Strudel3d639192016-09-09 11:52:26 -070012008 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12009 edge_application.sharpness = 0;
12010 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012011 edge_application.sharpness =
12012 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12013 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12014 int32_t sharpness =
12015 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12016 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12017 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12018 LOGD("Setting edge mode sharpness %d", sharpness);
12019 edge_application.sharpness = sharpness;
12020 }
12021 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 }
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12024 rc = BAD_VALUE;
12025 }
12026 }
12027
12028 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12029 int32_t respectFlashMode = 1;
12030 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12031 uint8_t fwk_aeMode =
12032 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012033 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12034 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12035 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012036 respectFlashMode = 0;
12037 LOGH("AE Mode controls flash, ignore android.flash.mode");
12038 }
12039 }
12040 if (respectFlashMode) {
12041 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12042 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12043 LOGH("flash mode after mapping %d", val);
12044 // To check: CAM_INTF_META_FLASH_MODE usage
12045 if (NAME_NOT_FOUND != val) {
12046 uint8_t flashMode = (uint8_t)val;
12047 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12048 rc = BAD_VALUE;
12049 }
12050 }
12051 }
12052 }
12053
12054 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12055 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12062 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12064 flashFiringTime)) {
12065 rc = BAD_VALUE;
12066 }
12067 }
12068
12069 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12070 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12071 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12072 hotPixelMode)) {
12073 rc = BAD_VALUE;
12074 }
12075 }
12076
12077 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12078 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12080 lensAperture)) {
12081 rc = BAD_VALUE;
12082 }
12083 }
12084
12085 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12086 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12087 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12088 filterDensity)) {
12089 rc = BAD_VALUE;
12090 }
12091 }
12092
12093 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12094 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12095 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12096 focalLength)) {
12097 rc = BAD_VALUE;
12098 }
12099 }
12100
12101 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12102 uint8_t optStabMode =
12103 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12105 optStabMode)) {
12106 rc = BAD_VALUE;
12107 }
12108 }
12109
12110 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12111 uint8_t videoStabMode =
12112 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12113 LOGD("videoStabMode from APP = %d", videoStabMode);
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12115 videoStabMode)) {
12116 rc = BAD_VALUE;
12117 }
12118 }
12119
12120
12121 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12122 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12124 noiseRedMode)) {
12125 rc = BAD_VALUE;
12126 }
12127 }
12128
12129 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12130 float reprocessEffectiveExposureFactor =
12131 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12133 reprocessEffectiveExposureFactor)) {
12134 rc = BAD_VALUE;
12135 }
12136 }
12137
12138 cam_crop_region_t scalerCropRegion;
12139 bool scalerCropSet = false;
12140 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12141 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12142 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12143 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12144 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12145
12146 // Map coordinate system from active array to sensor output.
12147 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12148 scalerCropRegion.width, scalerCropRegion.height);
12149
12150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12151 scalerCropRegion)) {
12152 rc = BAD_VALUE;
12153 }
12154 scalerCropSet = true;
12155 }
12156
12157 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12158 int64_t sensorExpTime =
12159 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12160 LOGD("setting sensorExpTime %lld", sensorExpTime);
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12162 sensorExpTime)) {
12163 rc = BAD_VALUE;
12164 }
12165 }
12166
12167 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12168 int64_t sensorFrameDuration =
12169 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012170 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12171 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12172 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12173 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12175 sensorFrameDuration)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179
12180 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12181 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12182 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12183 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12184 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12185 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12186 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12187 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12188 sensorSensitivity)) {
12189 rc = BAD_VALUE;
12190 }
12191 }
12192
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012193#ifndef USE_HAL_3_3
12194 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12195 int32_t ispSensitivity =
12196 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12197 if (ispSensitivity <
12198 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12199 ispSensitivity =
12200 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12201 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12202 }
12203 if (ispSensitivity >
12204 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12205 ispSensitivity =
12206 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12207 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12208 }
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12210 ispSensitivity)) {
12211 rc = BAD_VALUE;
12212 }
12213 }
12214#endif
12215
Thierry Strudel3d639192016-09-09 11:52:26 -070012216 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12217 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12219 rc = BAD_VALUE;
12220 }
12221 }
12222
12223 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12224 uint8_t fwk_facedetectMode =
12225 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12226
12227 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12228 fwk_facedetectMode);
12229
12230 if (NAME_NOT_FOUND != val) {
12231 uint8_t facedetectMode = (uint8_t)val;
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12233 facedetectMode)) {
12234 rc = BAD_VALUE;
12235 }
12236 }
12237 }
12238
Thierry Strudel54dc9782017-02-15 12:12:10 -080012239 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012240 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012241 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12243 histogramMode)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247
12248 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12249 uint8_t sharpnessMapMode =
12250 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12252 sharpnessMapMode)) {
12253 rc = BAD_VALUE;
12254 }
12255 }
12256
12257 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12258 uint8_t tonemapMode =
12259 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12261 rc = BAD_VALUE;
12262 }
12263 }
12264 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12265 /*All tonemap channels will have the same number of points*/
12266 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12267 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12268 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12269 cam_rgb_tonemap_curves tonemapCurves;
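        // Each tonemap curve point is an (in, out) pair of floats, so the per-channel point count
        // is half the framework entry count.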
12270 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12271 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12272 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12273 tonemapCurves.tonemap_points_cnt,
12274 CAM_MAX_TONEMAP_CURVE_SIZE);
12275 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12276 }
12277
12278 /* ch0 = G*/
12279 size_t point = 0;
12280 cam_tonemap_curve_t tonemapCurveGreen;
12281 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12282 for (size_t j = 0; j < 2; j++) {
12283 tonemapCurveGreen.tonemap_points[i][j] =
12284 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12285 point++;
12286 }
12287 }
12288 tonemapCurves.curves[0] = tonemapCurveGreen;
12289
12290 /* ch 1 = B */
12291 point = 0;
12292 cam_tonemap_curve_t tonemapCurveBlue;
12293 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12294 for (size_t j = 0; j < 2; j++) {
12295 tonemapCurveBlue.tonemap_points[i][j] =
12296 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12297 point++;
12298 }
12299 }
12300 tonemapCurves.curves[1] = tonemapCurveBlue;
12301
12302 /* ch 2 = R */
12303 point = 0;
12304 cam_tonemap_curve_t tonemapCurveRed;
12305 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12306 for (size_t j = 0; j < 2; j++) {
12307 tonemapCurveRed.tonemap_points[i][j] =
12308 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12309 point++;
12310 }
12311 }
12312 tonemapCurves.curves[2] = tonemapCurveRed;
12313
12314 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12315 tonemapCurves)) {
12316 rc = BAD_VALUE;
12317 }
12318 }
12319
12320 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12321 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12322 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12323 captureIntent)) {
12324 rc = BAD_VALUE;
12325 }
12326 }
12327
12328 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12329 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12331 blackLevelLock)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335
12336 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12337 uint8_t lensShadingMapMode =
12338 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12340 lensShadingMapMode)) {
12341 rc = BAD_VALUE;
12342 }
12343 }
12344
12345 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12346 cam_area_t roi;
12347 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012348 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012349
12350 // Map coordinate system from active array to sensor output.
12351 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12352 roi.rect.height);
12353
12354 if (scalerCropSet) {
12355 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12356 }
12357 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
12362 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12363 cam_area_t roi;
12364 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012365 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012366
12367 // Map coordinate system from active array to sensor output.
12368 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12369 roi.rect.height);
12370
12371 if (scalerCropSet) {
12372 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12373 }
12374 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12375 rc = BAD_VALUE;
12376 }
12377 }
12378
12379 // CDS for non-HFR non-video mode
12380 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12381 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12382 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12383 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12384 LOGE("Invalid CDS mode %d!", *fwk_cds);
12385 } else {
12386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12387 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12388 rc = BAD_VALUE;
12389 }
12390 }
12391 }
12392
Thierry Strudel04e026f2016-10-10 11:27:36 -070012393 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012394 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012395 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012396 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12397 }
12398 if (m_bVideoHdrEnabled)
12399 vhdr = CAM_VIDEO_HDR_MODE_ON;
12400
Thierry Strudel54dc9782017-02-15 12:12:10 -080012401 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12402
12403 if(vhdr != curr_hdr_state)
12404 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12405
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012406 rc = setVideoHdrMode(mParameters, vhdr);
12407 if (rc != NO_ERROR) {
12408         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012409 }
12410
12411 //IR
12412 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12413 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12414 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012415 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12416 uint8_t isIRon = 0;
12417
12418         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012419 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12420 LOGE("Invalid IR mode %d!", fwk_ir);
12421 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012422 if(isIRon != curr_ir_state )
12423 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12424
Thierry Strudel04e026f2016-10-10 11:27:36 -070012425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12426 CAM_INTF_META_IR_MODE, fwk_ir)) {
12427 rc = BAD_VALUE;
12428 }
12429 }
12430 }
12431
Thierry Strudel54dc9782017-02-15 12:12:10 -080012432 //Binning Correction Mode
12433 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12434 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12435 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12436 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12437 || (0 > fwk_binning_correction)) {
12438 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12439 } else {
12440 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12441 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12442 rc = BAD_VALUE;
12443 }
12444 }
12445 }
12446
Thierry Strudel269c81a2016-10-12 12:13:59 -070012447 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12448 float aec_speed;
12449 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12450 LOGD("AEC Speed :%f", aec_speed);
12451 if ( aec_speed < 0 ) {
12452             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12453 } else {
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12455 aec_speed)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459 }
12460
12461 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12462 float awb_speed;
12463 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12464 LOGD("AWB Speed :%f", awb_speed);
12465 if ( awb_speed < 0 ) {
12466             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12467 } else {
12468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12469 awb_speed)) {
12470 rc = BAD_VALUE;
12471 }
12472 }
12473 }
12474
Thierry Strudel3d639192016-09-09 11:52:26 -070012475 // TNR
12476 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12477 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12478 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012479 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012480 cam_denoise_param_t tnr;
12481 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12482 tnr.process_plates =
12483 (cam_denoise_process_type_t)frame_settings.find(
12484 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12485 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012486
12487 if(b_TnrRequested != curr_tnr_state)
12488 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12489
Thierry Strudel3d639192016-09-09 11:52:26 -070012490 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
Thierry Strudel54dc9782017-02-15 12:12:10 -080012495 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012496 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012497 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12499 *exposure_metering_mode)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503
Thierry Strudel3d639192016-09-09 11:52:26 -070012504 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12505 int32_t fwk_testPatternMode =
12506 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12507 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12508 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12509
12510 if (NAME_NOT_FOUND != testPatternMode) {
12511 cam_test_pattern_data_t testPatternData;
12512 memset(&testPatternData, 0, sizeof(testPatternData));
12513 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12514 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12515 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12516 int32_t *fwk_testPatternData =
12517 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12518 testPatternData.r = fwk_testPatternData[0];
12519 testPatternData.b = fwk_testPatternData[3];
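                // The framework supplies the solid-color pattern as [R, Geven, Godd, B]; the two
                // green samples are mapped to Gr/Gb below according to the sensor's Bayer layout.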
12520 switch (gCamCapability[mCameraId]->color_arrangement) {
12521 case CAM_FILTER_ARRANGEMENT_RGGB:
12522 case CAM_FILTER_ARRANGEMENT_GRBG:
12523 testPatternData.gr = fwk_testPatternData[1];
12524 testPatternData.gb = fwk_testPatternData[2];
12525 break;
12526 case CAM_FILTER_ARRANGEMENT_GBRG:
12527 case CAM_FILTER_ARRANGEMENT_BGGR:
12528 testPatternData.gr = fwk_testPatternData[2];
12529 testPatternData.gb = fwk_testPatternData[1];
12530 break;
12531 default:
12532 LOGE("color arrangement %d is not supported",
12533 gCamCapability[mCameraId]->color_arrangement);
12534 break;
12535 }
12536 }
12537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12538 testPatternData)) {
12539 rc = BAD_VALUE;
12540 }
12541 } else {
12542 LOGE("Invalid framework sensor test pattern mode %d",
12543 fwk_testPatternMode);
12544 }
12545 }
12546
12547 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12548 size_t count = 0;
12549 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12550 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12551 gps_coords.data.d, gps_coords.count, count);
12552 if (gps_coords.count != count) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12558 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12559 size_t count = 0;
12560 const char *gps_methods_src = (const char *)
12561 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12562 memset(gps_methods, '\0', sizeof(gps_methods));
12563 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12564 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12565 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12566 if (GPS_PROCESSING_METHOD_SIZE != count) {
12567 rc = BAD_VALUE;
12568 }
12569 }
12570
12571 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12572 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12573 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12574 gps_timestamp)) {
12575 rc = BAD_VALUE;
12576 }
12577 }
12578
12579 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12580 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12581 cam_rotation_info_t rotation_info;
12582 if (orientation == 0) {
12583 rotation_info.rotation = ROTATE_0;
12584 } else if (orientation == 90) {
12585 rotation_info.rotation = ROTATE_90;
12586 } else if (orientation == 180) {
12587 rotation_info.rotation = ROTATE_180;
12588 } else if (orientation == 270) {
12589 rotation_info.rotation = ROTATE_270;
12590 }
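        // ANDROID_JPEG_ORIENTATION is constrained by the framework to 0/90/180/270, so no other
        // value is expected here.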
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012591 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012592 rotation_info.streamId = snapshotStreamId;
12593 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12594 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12595 rc = BAD_VALUE;
12596 }
12597 }
12598
12599 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12600 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
12606 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12607 uint32_t thumb_quality = (uint32_t)
12608 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12610 thumb_quality)) {
12611 rc = BAD_VALUE;
12612 }
12613 }
12614
12615 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12616 cam_dimension_t dim;
12617 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12618 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12619 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12620 rc = BAD_VALUE;
12621 }
12622 }
12623
12624 // Internal metadata
12625 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12626 size_t count = 0;
12627 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12628 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12629 privatedata.data.i32, privatedata.count, count);
12630 if (privatedata.count != count) {
12631 rc = BAD_VALUE;
12632 }
12633 }
12634
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012635 // ISO/Exposure Priority
12636 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12637 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12638 cam_priority_mode_t mode =
12639 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12640 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12641 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12642 use_iso_exp_pty.previewOnly = FALSE;
12643 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12644 use_iso_exp_pty.value = *ptr;
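            // The same 64-bit payload carries either an ISO value or an exposure time, selected
            // by QCAMERA3_SELECT_PRIORITY.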
12645
12646 if(CAM_ISO_PRIORITY == mode) {
12647 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12648 use_iso_exp_pty)) {
12649 rc = BAD_VALUE;
12650 }
12651 }
12652 else {
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12654 use_iso_exp_pty)) {
12655 rc = BAD_VALUE;
12656 }
12657 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012658
12659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12660 rc = BAD_VALUE;
12661 }
12662 }
12663 } else {
12664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12665 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012666 }
12667 }
12668
12669 // Saturation
12670 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12671 int32_t* use_saturation =
12672 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677
Thierry Strudel3d639192016-09-09 11:52:26 -070012678 // EV step
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12680 gCamCapability[mCameraId]->exp_compensation_step)) {
12681 rc = BAD_VALUE;
12682 }
12683
12684 // CDS info
12685 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12686 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12687 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12688
12689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12690 CAM_INTF_META_CDS_DATA, *cdsData)) {
12691 rc = BAD_VALUE;
12692 }
12693 }
12694
Shuzhen Wang19463d72016-03-08 11:09:52 -080012695 // Hybrid AE
12696 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12697 uint8_t *hybrid_ae = (uint8_t *)
12698 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12699
12700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12701 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12702 rc = BAD_VALUE;
12703 }
12704 }
12705
Shuzhen Wang14415f52016-11-16 18:26:18 -080012706 // Histogram
12707 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12708 uint8_t histogramMode =
12709 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12710 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12711 histogramMode)) {
12712 rc = BAD_VALUE;
12713 }
12714 }
12715
12716 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12717 int32_t histogramBins =
12718 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12719 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12720 histogramBins)) {
12721 rc = BAD_VALUE;
12722 }
12723 }
12724
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012725 // Tracking AF
12726 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12727 uint8_t trackingAfTrigger =
12728 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12729 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12730 trackingAfTrigger)) {
12731 rc = BAD_VALUE;
12732 }
12733 }
12734
Thierry Strudel3d639192016-09-09 11:52:26 -070012735 return rc;
12736}
12737
12738/*===========================================================================
12739 * FUNCTION : captureResultCb
12740 *
12741 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12742 *
12743 * PARAMETERS :
12744 * @metadata : metadata super buffer from mm-camera-interface; NULL for buffer-only results
12745 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12746 * @frame_number : frame number of the request; @isInputBuffer : input buffer flag; @userdata : HAL instance
12747 *
12748 * RETURN : NONE
12749 *==========================================================================*/
12750void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12751 camera3_stream_buffer_t *buffer,
12752 uint32_t frame_number, bool isInputBuffer, void *userdata)
12753{
12754 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12755 if (hw == NULL) {
12756 LOGE("Invalid hw %p", hw);
12757 return;
12758 }
12759
12760 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12761 return;
12762}
12763
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012764/*===========================================================================
12765 * FUNCTION : setBufferErrorStatus
12766 *
12767 * DESCRIPTION: Callback handler for channels to report any buffer errors
12768 *
12769 * PARAMETERS :
12770 * @ch : Channel on which buffer error is reported from
12771 * @frame_number : frame number on which buffer error is reported on
12772 * @buffer_status : buffer error status
12773 * @userdata: userdata
12774 *
12775 * RETURN : NONE
12776 *==========================================================================*/
12777void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12778 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12779{
12780 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12781 if (hw == NULL) {
12782 LOGE("Invalid hw %p", hw);
12783 return;
12784 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012785
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012786 hw->setBufferErrorStatus(ch, frame_number, err);
12787 return;
12788}
12789
12790void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12791 uint32_t frameNumber, camera3_buffer_status_t err)
12792{
12793 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12794 pthread_mutex_lock(&mMutex);
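    // Mark every pending buffer of this frame that belongs to the reporting channel as errored.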
12795
12796 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12797 if (req.frame_number != frameNumber)
12798 continue;
12799 for (auto& k : req.mPendingBufferList) {
12800 if(k.stream->priv == ch) {
12801 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12802 }
12803 }
12804 }
12805
12806 pthread_mutex_unlock(&mMutex);
12807 return;
12808}
Thierry Strudel3d639192016-09-09 11:52:26 -070012809/*===========================================================================
12810 * FUNCTION : initialize
12811 *
12812 * DESCRIPTION: Pass framework callback pointers to HAL
12813 *
12814 * PARAMETERS :
12815 *
12816 *
12817 * RETURN : Success : 0
12818 * Failure: -ENODEV
12819 *==========================================================================*/
12820
12821int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12822 const camera3_callback_ops_t *callback_ops)
12823{
12824 LOGD("E");
12825 QCamera3HardwareInterface *hw =
12826 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12827 if (!hw) {
12828 LOGE("NULL camera device");
12829 return -ENODEV;
12830 }
12831
12832 int rc = hw->initialize(callback_ops);
12833 LOGD("X");
12834 return rc;
12835}
12836
12837/*===========================================================================
12838 * FUNCTION : configure_streams
12839 *
12840 * DESCRIPTION: Entry point for configure_streams; forwards the stream list to configureStreams()
12841 *
12842 * PARAMETERS :
12843 *
12844 *
12845 * RETURN : Success: 0
12846 * Failure: -EINVAL (if stream configuration is invalid)
12847 * -ENODEV (fatal error)
12848 *==========================================================================*/
12849
12850int QCamera3HardwareInterface::configure_streams(
12851 const struct camera3_device *device,
12852 camera3_stream_configuration_t *stream_list)
12853{
12854 LOGD("E");
12855 QCamera3HardwareInterface *hw =
12856 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12857 if (!hw) {
12858 LOGE("NULL camera device");
12859 return -ENODEV;
12860 }
12861 int rc = hw->configureStreams(stream_list);
12862 LOGD("X");
12863 return rc;
12864}
12865
12866/*===========================================================================
12867 * FUNCTION : construct_default_request_settings
12868 *
12869 * DESCRIPTION: Configure a settings buffer to meet the required use case
12870 *
12871 * PARAMETERS :
12872 *
12873 *
12874 * RETURN : Success: Return valid metadata
12875 * Failure: Return NULL
12876 *==========================================================================*/
12877const camera_metadata_t* QCamera3HardwareInterface::
12878 construct_default_request_settings(const struct camera3_device *device,
12879 int type)
12880{
12881
12882 LOGD("E");
12883 camera_metadata_t* fwk_metadata = NULL;
12884 QCamera3HardwareInterface *hw =
12885 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12886 if (!hw) {
12887 LOGE("NULL camera device");
12888 return NULL;
12889 }
12890
12891 fwk_metadata = hw->translateCapabilityToMetadata(type);
12892
12893 LOGD("X");
12894 return fwk_metadata;
12895}
12896
12897/*===========================================================================
12898 * FUNCTION : process_capture_request
12899 *
12900 * DESCRIPTION: Entry point for process_capture_request; hands the request to orchestrateRequest()
12901 *
12902 * PARAMETERS :
12903 *
12904 *
12905 * RETURN :
12906 *==========================================================================*/
12907int QCamera3HardwareInterface::process_capture_request(
12908 const struct camera3_device *device,
12909 camera3_capture_request_t *request)
12910{
12911 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012912 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012913 QCamera3HardwareInterface *hw =
12914 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12915 if (!hw) {
12916 LOGE("NULL camera device");
12917 return -EINVAL;
12918 }
12919
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012920 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012921 LOGD("X");
12922 return rc;
12923}
12924
12925/*===========================================================================
12926 * FUNCTION : dump
12927 *
12928 * DESCRIPTION:
12929 *
12930 * PARAMETERS :
12931 *
12932 *
12933 * RETURN :
12934 *==========================================================================*/
12935
12936void QCamera3HardwareInterface::dump(
12937 const struct camera3_device *device, int fd)
12938{
12939 /* Log level property is read when "adb shell dumpsys media.camera" is
12940 called so that the log level can be controlled without restarting
12941 the media server */
12942 getLogLevel();
12943
12944 LOGD("E");
12945 QCamera3HardwareInterface *hw =
12946 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12947 if (!hw) {
12948 LOGE("NULL camera device");
12949 return;
12950 }
12951
12952 hw->dump(fd);
12953 LOGD("X");
12954 return;
12955}
12956
12957/*===========================================================================
12958 * FUNCTION : flush
12959 *
12960 * DESCRIPTION:
12961 *
12962 * PARAMETERS :
12963 *
12964 *
12965 * RETURN :
12966 *==========================================================================*/
12967
12968int QCamera3HardwareInterface::flush(
12969 const struct camera3_device *device)
12970{
12971 int rc;
12972 LOGD("E");
12973 QCamera3HardwareInterface *hw =
12974 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12975 if (!hw) {
12976 LOGE("NULL camera device");
12977 return -EINVAL;
12978 }
12979
12980 pthread_mutex_lock(&hw->mMutex);
12981 // Validate current state
12982 switch (hw->mState) {
12983 case STARTED:
12984 /* valid state */
12985 break;
12986
12987 case ERROR:
12988 pthread_mutex_unlock(&hw->mMutex);
12989 hw->handleCameraDeviceError();
12990 return -ENODEV;
12991
12992 default:
12993 LOGI("Flush returned during state %d", hw->mState);
12994 pthread_mutex_unlock(&hw->mMutex);
12995 return 0;
12996 }
12997 pthread_mutex_unlock(&hw->mMutex);
12998
12999 rc = hw->flush(true /* restart channels */ );
13000 LOGD("X");
13001 return rc;
13002}
13003
13004/*===========================================================================
13005 * FUNCTION : close_camera_device
13006 *
13007 * DESCRIPTION:
13008 *
13009 * PARAMETERS :
13010 *
13011 *
13012 * RETURN :
13013 *==========================================================================*/
13014int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13015{
13016 int ret = NO_ERROR;
13017 QCamera3HardwareInterface *hw =
13018 reinterpret_cast<QCamera3HardwareInterface *>(
13019 reinterpret_cast<camera3_device_t *>(device)->priv);
13020 if (!hw) {
13021 LOGE("NULL camera device");
13022 return BAD_VALUE;
13023 }
13024
13025 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13026 delete hw;
13027 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013028 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013029 return ret;
13030}
13031
13032/*===========================================================================
13033 * FUNCTION : getWaveletDenoiseProcessPlate
13034 *
13035 * DESCRIPTION: query wavelet denoise process plate
13036 *
13037 * PARAMETERS : None
13038 *
13039 * RETURN : WNR process plate value
13040 *==========================================================================*/
13041cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13042{
13043 char prop[PROPERTY_VALUE_MAX];
13044 memset(prop, 0, sizeof(prop));
13045 property_get("persist.denoise.process.plates", prop, "0");
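    // persist.denoise.process.plates: 0 = YCbCr plane, 1 = CbCr only, 2 = streamlined YCbCr,
    // 3 = streamlined CbCr; any other value falls back to streamlined YCbCr.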
13046 int processPlate = atoi(prop);
13047 switch(processPlate) {
13048 case 0:
13049 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13050 case 1:
13051 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13052 case 2:
13053 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13054 case 3:
13055 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13056 default:
13057 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13058 }
13059}
13060
13061
13062/*===========================================================================
13063 * FUNCTION : getTemporalDenoiseProcessPlate
13064 *
13065 * DESCRIPTION: query temporal denoise process plate
13066 *
13067 * PARAMETERS : None
13068 *
13069 * RETURN : TNR process plate value
13070 *==========================================================================*/
13071cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13072{
13073 char prop[PROPERTY_VALUE_MAX];
13074 memset(prop, 0, sizeof(prop));
13075 property_get("persist.tnr.process.plates", prop, "0");
13076 int processPlate = atoi(prop);
13077 switch(processPlate) {
13078 case 0:
13079 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13080 case 1:
13081 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13082 case 2:
13083 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13084 case 3:
13085 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13086 default:
13087 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13088 }
13089}
13090
13091
13092/*===========================================================================
13093 * FUNCTION : extractSceneMode
13094 *
13095 * DESCRIPTION: Extract scene mode from frameworks set metadata
13096 *
13097 * PARAMETERS :
13098 * @frame_settings: CameraMetadata reference
13099 * @metaMode: ANDROID_CONTROL_MODE
13100 * @hal_metadata: hal metadata structure
13101 *
13102 * RETURN : NO_ERROR on success, error code otherwise
13103 *==========================================================================*/
13104int32_t QCamera3HardwareInterface::extractSceneMode(
13105 const CameraMetadata &frame_settings, uint8_t metaMode,
13106 metadata_buffer_t *hal_metadata)
13107{
13108 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013109 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13110
13111 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13112 LOGD("Ignoring control mode OFF_KEEP_STATE");
13113 return NO_ERROR;
13114 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013115
13116 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13117 camera_metadata_ro_entry entry =
13118 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13119 if (0 == entry.count)
13120 return rc;
13121
13122 uint8_t fwk_sceneMode = entry.data.u8[0];
13123
13124 int val = lookupHalName(SCENE_MODES_MAP,
13125 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13126 fwk_sceneMode);
13127 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013128 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013129 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013130 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013131 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013132
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013133 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13134 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13135 }
13136
13137 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13138 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013139 cam_hdr_param_t hdr_params;
13140 hdr_params.hdr_enable = 1;
13141 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13142 hdr_params.hdr_need_1x = false;
13143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13144 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13145 rc = BAD_VALUE;
13146 }
13147 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013148
Thierry Strudel3d639192016-09-09 11:52:26 -070013149 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13150 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13151 rc = BAD_VALUE;
13152 }
13153 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013154
13155 if (mForceHdrSnapshot) {
13156 cam_hdr_param_t hdr_params;
13157 hdr_params.hdr_enable = 1;
13158 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13159 hdr_params.hdr_need_1x = false;
13160 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13161 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13162 rc = BAD_VALUE;
13163 }
13164 }
13165
Thierry Strudel3d639192016-09-09 11:52:26 -070013166 return rc;
13167}
13168
13169/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013170 * FUNCTION : setVideoHdrMode
13171 *
13172 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13173 *
13174 * PARAMETERS :
13175 * @hal_metadata: hal metadata structure
13176 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE value)
13177 *
13178 * RETURN : NO_ERROR on success, BAD_VALUE for an invalid mode
13179 *==========================================================================*/
13180int32_t QCamera3HardwareInterface::setVideoHdrMode(
13181 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13182{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013183 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13184 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13185 }
13186
13187 LOGE("Invalid Video HDR mode %d!", vhdr);
13188 return BAD_VALUE;
13189}
13190
13191/*===========================================================================
13192 * FUNCTION : setSensorHDR
13193 *
13194 * DESCRIPTION: Enable/disable sensor HDR.
13195 *
13196 * PARAMETERS :
13197 * @hal_metadata: hal metadata structure
13198 * @enable: whether to enable or disable sensor HDR
13199 * @isVideoHdrEnable: true when invoked for video HDR (m_bSensorHDREnabled is left unchanged)
13200 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13201 *==========================================================================*/
13202int32_t QCamera3HardwareInterface::setSensorHDR(
13203 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13204{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013205 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013206 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13207
13208 if (enable) {
13209 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13210 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13211 #ifdef _LE_CAMERA_
13212 //Default to staggered HDR for IOT
13213 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13214 #else
13215 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13216 #endif
13217 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
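        // The property value is interpreted directly as a cam_sensor_hdr_type_t (the IoT build
        // defaults to 3, i.e. staggered HDR); values outside the known modes are rejected by the
        // switch below.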
13218 }
13219
13220 bool isSupported = false;
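    // Apply the requested mode only if the corresponding sensor/ISP feature bit
    // is advertised in qcom_supported_feature_mask.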
13221 switch (sensor_hdr) {
13222 case CAM_SENSOR_HDR_IN_SENSOR:
13223 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13224 CAM_QCOM_FEATURE_SENSOR_HDR) {
13225 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013226 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013227 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013228 break;
13229 case CAM_SENSOR_HDR_ZIGZAG:
13230 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13231 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13232 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013233 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013234 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013235 break;
13236 case CAM_SENSOR_HDR_STAGGERED:
13237 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13238 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13239 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013240 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013241 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013242 break;
13243 case CAM_SENSOR_HDR_OFF:
13244 isSupported = true;
13245 LOGD("Turning off sensor HDR");
13246 break;
13247 default:
13248 LOGE("HDR mode %d not supported", sensor_hdr);
13249 rc = BAD_VALUE;
13250 break;
13251 }
13252
13253    if (isSupported) {
13254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13255 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13256 rc = BAD_VALUE;
13257 } else {
13258            if (!isVideoHdrEnable)
13259 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013260 }
13261 }
13262 return rc;
13263}
13264
13265/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013266 * FUNCTION : needRotationReprocess
13267 *
13268 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13269 *
13270 * PARAMETERS : none
13271 *
13272 * RETURN : true: needed
13273 * false: no need
13274 *==========================================================================*/
13275bool QCamera3HardwareInterface::needRotationReprocess()
13276{
13277 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13278        // pp has the capability to process rotation
13279 LOGH("need do reprocess for rotation");
13280 return true;
13281 }
13282
13283 return false;
13284}
13285
13286/*===========================================================================
13287 * FUNCTION : needReprocess
13288 *
13289 * DESCRIPTION: if reprocess is needed
13290 *
13291 * PARAMETERS : none
13292 *
13293 * RETURN : true: needed
13294 * false: no need
13295 *==========================================================================*/
13296bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13297{
13298 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13299 // TODO: add for ZSL HDR later
13300 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13301        if (postprocess_mask == CAM_QCOM_FEATURE_NONE) {
13302 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13303 return true;
13304 } else {
13305 LOGH("already post processed frame");
13306 return false;
13307 }
13308 }
13309 return needRotationReprocess();
13310}
13311
13312/*===========================================================================
13313 * FUNCTION : needJpegExifRotation
13314 *
13315 * DESCRIPTION: if JPEG EXIF rotation is needed
13316 *
13317 * PARAMETERS : none
13318 *
13319 * RETURN : true: needed
13320 * false: no need
13321 *==========================================================================*/
13322bool QCamera3HardwareInterface::needJpegExifRotation()
13323{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013324 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013325 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13326 LOGD("Need use Jpeg EXIF Rotation");
13327 return true;
13328 }
13329 return false;
13330}
13331
13332/*===========================================================================
13333 * FUNCTION : addOfflineReprocChannel
13334 *
13335 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13336 * coming from input channel
13337 *
13338 * PARAMETERS :
13339 * @config : reprocess configuration
13340 * @inputChHandle : pointer to the input (source) channel
13341 *
13342 *
13343 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13344 *==========================================================================*/
13345QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13346 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13347{
13348 int32_t rc = NO_ERROR;
13349 QCamera3ReprocessChannel *pChannel = NULL;
13350
13351 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013352 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13353 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013354 if (NULL == pChannel) {
13355 LOGE("no mem for reprocess channel");
13356 return NULL;
13357 }
13358
13359 rc = pChannel->initialize(IS_TYPE_NONE);
13360 if (rc != NO_ERROR) {
13361 LOGE("init reprocess channel failed, ret = %d", rc);
13362 delete pChannel;
13363 return NULL;
13364 }
13365
13366 // pp feature config
13367 cam_pp_feature_config_t pp_config;
13368 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13369
13370 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13371 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13372 & CAM_QCOM_FEATURE_DSDN) {
13373        // Use CPP CDS in case h/w supports it.
13374 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13375 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13376 }
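    // If the CPP cannot rotate, keep rotation out of the reprocess feature mask;
    // orientation is then handled via JPEG EXIF rotation (see needJpegExifRotation()).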
13377 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13378 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13379 }
13380
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013381 if (config.hdr_param.hdr_enable) {
13382 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13383 pp_config.hdr_param = config.hdr_param;
13384 }
13385
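    // mForceHdrSnapshot overrides the request and enables multi-frame HDR
    // bracketing for this offline reprocess.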
13386 if (mForceHdrSnapshot) {
13387 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13388 pp_config.hdr_param.hdr_enable = 1;
13389 pp_config.hdr_param.hdr_need_1x = 0;
13390 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13391 }
13392
Thierry Strudel3d639192016-09-09 11:52:26 -070013393 rc = pChannel->addReprocStreamsFromSource(pp_config,
13394 config,
13395 IS_TYPE_NONE,
13396 mMetadataChannel);
13397
13398 if (rc != NO_ERROR) {
13399 delete pChannel;
13400 return NULL;
13401 }
13402 return pChannel;
13403}
13404
13405/*===========================================================================
13406 * FUNCTION : getMobicatMask
13407 *
13408 * DESCRIPTION: returns mobicat mask
13409 *
13410 * PARAMETERS : none
13411 *
13412 * RETURN : mobicat mask
13413 *
13414 *==========================================================================*/
13415uint8_t QCamera3HardwareInterface::getMobicatMask()
13416{
13417 return m_MobicatMask;
13418}
13419
13420/*===========================================================================
13421 * FUNCTION : setMobicat
13422 *
13423 * DESCRIPTION: set Mobicat on/off.
13424 *
13425 * PARAMETERS :
13426 * @params : none
13427 *
13428 * RETURN : int32_t type of status
13429 * NO_ERROR -- success
13430 * none-zero failure code
13431 *              non-zero failure code
13432int32_t QCamera3HardwareInterface::setMobicat()
13433{
13434 char value [PROPERTY_VALUE_MAX];
13435 property_get("persist.camera.mobicat", value, "0");
13436 int32_t ret = NO_ERROR;
13437 uint8_t enableMobi = (uint8_t)atoi(value);
13438
13439 if (enableMobi) {
13440 tune_cmd_t tune_cmd;
13441 tune_cmd.type = SET_RELOAD_CHROMATIX;
13442 tune_cmd.module = MODULE_ALL;
13443 tune_cmd.value = TRUE;
13444 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13445 CAM_INTF_PARM_SET_VFE_COMMAND,
13446 tune_cmd);
13447
13448 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13449 CAM_INTF_PARM_SET_PP_COMMAND,
13450 tune_cmd);
13451 }
13452 m_MobicatMask = enableMobi;
13453
13454 return ret;
13455}
13456
13457/*===========================================================================
13458* FUNCTION : getLogLevel
13459*
13460* DESCRIPTION: Reads the log level property into a variable
13461*
13462* PARAMETERS :
13463* None
13464*
13465* RETURN :
13466* None
13467*==========================================================================*/
13468void QCamera3HardwareInterface::getLogLevel()
13469{
13470 char prop[PROPERTY_VALUE_MAX];
13471 uint32_t globalLogLevel = 0;
13472
13473 property_get("persist.camera.hal.debug", prop, "0");
13474 int val = atoi(prop);
13475 if (0 <= val) {
13476 gCamHal3LogLevel = (uint32_t)val;
13477 }
13478
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013479 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013480 gKpiDebugLevel = atoi(prop);
13481
13482 property_get("persist.camera.global.debug", prop, "0");
13483 val = atoi(prop);
13484 if (0 <= val) {
13485 globalLogLevel = (uint32_t)val;
13486 }
13487
13488 /* Highest log level among hal.logs and global.logs is selected */
13489 if (gCamHal3LogLevel < globalLogLevel)
13490 gCamHal3LogLevel = globalLogLevel;
13491
13492 return;
13493}
13494
13495/*===========================================================================
13496 * FUNCTION : validateStreamRotations
13497 *
13498 * DESCRIPTION: Check if the rotations requested are supported
13499 *
13500 * PARAMETERS :
13501 * @stream_list : streams to be configured
13502 *
13503 * RETURN : NO_ERROR on success
13504 * -EINVAL on failure
13505 *
13506 *==========================================================================*/
13507int QCamera3HardwareInterface::validateStreamRotations(
13508 camera3_stream_configuration_t *streamList)
13509{
13510 int rc = NO_ERROR;
13511
13512 /*
13513 * Loop through all streams requested in configuration
13514 * Check if unsupported rotations have been requested on any of them
13515 */
13516 for (size_t j = 0; j < streamList->num_streams; j++){
13517 camera3_stream_t *newStream = streamList->streams[j];
13518
13519 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13520 bool isImplDef = (newStream->format ==
13521 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13522 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13523 isImplDef);
13524
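        // Rotation is only allowed on implementation-defined output streams that
        // are not ZSL (bidirectional) streams.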
13525 if (isRotated && (!isImplDef || isZsl)) {
13526            LOGE("Error: Unsupported rotation of %d requested for stream "
13527 "type:%d and stream format:%d",
13528 newStream->rotation, newStream->stream_type,
13529 newStream->format);
13530 rc = -EINVAL;
13531 break;
13532 }
13533 }
13534
13535 return rc;
13536}
13537
13538/*===========================================================================
13539* FUNCTION : getFlashInfo
13540*
13541* DESCRIPTION: Retrieve information about whether the device has a flash.
13542*
13543* PARAMETERS :
13544* @cameraId : Camera id to query
13545* @hasFlash : Boolean indicating whether there is a flash device
13546* associated with given camera
13547* @flashNode : If a flash device exists, this will be its device node.
13548*
13549* RETURN :
13550* None
13551*==========================================================================*/
13552void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13553 bool& hasFlash,
13554 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13555{
13556 cam_capability_t* camCapability = gCamCapability[cameraId];
13557 if (NULL == camCapability) {
13558 hasFlash = false;
13559 flashNode[0] = '\0';
13560 } else {
13561 hasFlash = camCapability->flash_available;
13562 strlcpy(flashNode,
13563 (char*)camCapability->flash_dev_name,
13564 QCAMERA_MAX_FILEPATH_LENGTH);
13565 }
13566}
13567
13568/*===========================================================================
13569* FUNCTION : getEepromVersionInfo
13570*
13571* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13572*
13573* PARAMETERS : None
13574*
13575* RETURN : string describing EEPROM version
13576* "\0" if no such info available
13577*==========================================================================*/
13578const char *QCamera3HardwareInterface::getEepromVersionInfo()
13579{
13580 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13581}
13582
13583/*===========================================================================
13584* FUNCTION : getLdafCalib
13585*
13586* DESCRIPTION: Retrieve Laser AF calibration data
13587*
13588* PARAMETERS : None
13589*
13590* RETURN : Two uint32_t describing laser AF calibration data
13591* NULL if none is available.
13592*==========================================================================*/
13593const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13594{
13595 if (mLdafCalibExist) {
13596 return &mLdafCalib[0];
13597 } else {
13598 return NULL;
13599 }
13600}
13601
13602/*===========================================================================
13603 * FUNCTION : dynamicUpdateMetaStreamInfo
13604 *
13605 * DESCRIPTION: This function:
13606 * (1) stops all the channels
13607 * (2) returns error on pending requests and buffers
13608 * (3) sends metastream_info in setparams
13609 * (4) starts all channels
13610 *              This is useful when the sensor has to be restarted to apply
13611 *              settings such as frame rate from a different sensor mode
13612 *
13613 * PARAMETERS : None
13614 *
13615 * RETURN : NO_ERROR on success
13616 * Error codes on failure
13617 *
13618 *==========================================================================*/
13619int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13620{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013621 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013622 int rc = NO_ERROR;
13623
13624 LOGD("E");
13625
13626 rc = stopAllChannels();
13627 if (rc < 0) {
13628 LOGE("stopAllChannels failed");
13629 return rc;
13630 }
13631
13632 rc = notifyErrorForPendingRequests();
13633 if (rc < 0) {
13634 LOGE("notifyErrorForPendingRequests failed");
13635 return rc;
13636 }
13637
13638 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13639        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx, "
13640                "Format:%d",
13641 mStreamConfigInfo.type[i],
13642 mStreamConfigInfo.stream_sizes[i].width,
13643 mStreamConfigInfo.stream_sizes[i].height,
13644 mStreamConfigInfo.postprocess_mask[i],
13645 mStreamConfigInfo.format[i]);
13646 }
13647
13648 /* Send meta stream info once again so that ISP can start */
13649 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13650 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13651 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13652 mParameters);
13653 if (rc < 0) {
13654 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13655 }
13656
13657 rc = startAllChannels();
13658 if (rc < 0) {
13659 LOGE("startAllChannels failed");
13660 return rc;
13661 }
13662
13663 LOGD("X");
13664 return rc;
13665}
13666
13667/*===========================================================================
13668 * FUNCTION : stopAllChannels
13669 *
13670 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13671 *
13672 * PARAMETERS : None
13673 *
13674 * RETURN : NO_ERROR on success
13675 * Error codes on failure
13676 *
13677 *==========================================================================*/
13678int32_t QCamera3HardwareInterface::stopAllChannels()
13679{
13680 int32_t rc = NO_ERROR;
13681
13682 LOGD("Stopping all channels");
13683 // Stop the Streams/Channels
13684 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13685 it != mStreamInfo.end(); it++) {
13686 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13687 if (channel) {
13688 channel->stop();
13689 }
13690 (*it)->status = INVALID;
13691 }
13692
13693 if (mSupportChannel) {
13694 mSupportChannel->stop();
13695 }
13696 if (mAnalysisChannel) {
13697 mAnalysisChannel->stop();
13698 }
13699 if (mRawDumpChannel) {
13700 mRawDumpChannel->stop();
13701 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013702 if (mHdrPlusRawSrcChannel) {
13703 mHdrPlusRawSrcChannel->stop();
13704 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013705 if (mMetadataChannel) {
13706 /* If content of mStreamInfo is not 0, there is metadata stream */
13707 mMetadataChannel->stop();
13708 }
13709
13710 LOGD("All channels stopped");
13711 return rc;
13712}
13713
13714/*===========================================================================
13715 * FUNCTION : startAllChannels
13716 *
13717 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13718 *
13719 * PARAMETERS : None
13720 *
13721 * RETURN : NO_ERROR on success
13722 * Error codes on failure
13723 *
13724 *==========================================================================*/
13725int32_t QCamera3HardwareInterface::startAllChannels()
13726{
13727 int32_t rc = NO_ERROR;
13728
13729 LOGD("Start all channels ");
13730 // Start the Streams/Channels
13731 if (mMetadataChannel) {
13732 /* If content of mStreamInfo is not 0, there is metadata stream */
13733 rc = mMetadataChannel->start();
13734 if (rc < 0) {
13735 LOGE("META channel start failed");
13736 return rc;
13737 }
13738 }
13739 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13740 it != mStreamInfo.end(); it++) {
13741 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13742 if (channel) {
13743 rc = channel->start();
13744 if (rc < 0) {
13745 LOGE("channel start failed");
13746 return rc;
13747 }
13748 }
13749 }
13750 if (mAnalysisChannel) {
13751 mAnalysisChannel->start();
13752 }
13753 if (mSupportChannel) {
13754 rc = mSupportChannel->start();
13755 if (rc < 0) {
13756 LOGE("Support channel start failed");
13757 return rc;
13758 }
13759 }
13760 if (mRawDumpChannel) {
13761 rc = mRawDumpChannel->start();
13762 if (rc < 0) {
13763 LOGE("RAW dump channel start failed");
13764 return rc;
13765 }
13766 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013767 if (mHdrPlusRawSrcChannel) {
13768 rc = mHdrPlusRawSrcChannel->start();
13769 if (rc < 0) {
13770 LOGE("HDR+ RAW channel start failed");
13771 return rc;
13772 }
13773 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013774
13775 LOGD("All channels started");
13776 return rc;
13777}
13778
13779/*===========================================================================
13780 * FUNCTION : notifyErrorForPendingRequests
13781 *
13782 * DESCRIPTION: This function sends error for all the pending requests/buffers
13783 *
13784 * PARAMETERS : None
13785 *
13786 * RETURN : Error codes
13787 * NO_ERROR on success
13788 *
13789 *==========================================================================*/
13790int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13791{
13792 int32_t rc = NO_ERROR;
13793 unsigned int frameNum = 0;
13794 camera3_capture_result_t result;
13795 camera3_stream_buffer_t *pStream_Buf = NULL;
13796
13797 memset(&result, 0, sizeof(camera3_capture_result_t));
13798
13799 if (mPendingRequestsList.size() > 0) {
13800 pendingRequestIterator i = mPendingRequestsList.begin();
13801 frameNum = i->frame_number;
13802 } else {
13803 /* There might still be pending buffers even though there are
13804 no pending requests. Setting the frameNum to MAX so that
13805 all the buffers with smaller frame numbers are returned */
13806 frameNum = UINT_MAX;
13807 }
13808
13809 LOGH("Oldest frame num on mPendingRequestsList = %u",
13810 frameNum);
13811
Emilian Peev7650c122017-01-19 08:24:33 -080013812 notifyErrorFoPendingDepthData(mDepthChannel);
13813
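    // Requests older than the oldest pending request already had their result
    // metadata sent, so only ERROR_BUFFER is reported for their buffers; newer
    // requests get an ERROR_REQUEST notification and all buffers returned in error.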
Thierry Strudel3d639192016-09-09 11:52:26 -070013814 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13815 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13816
13817 if (req->frame_number < frameNum) {
13818 // Send Error notify to frameworks for each buffer for which
13819 // metadata buffer is already sent
13820 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13821 req->frame_number, req->mPendingBufferList.size());
13822
13823 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13824 if (NULL == pStream_Buf) {
13825 LOGE("No memory for pending buffers array");
13826 return NO_MEMORY;
13827 }
13828 memset(pStream_Buf, 0,
13829 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13830 result.result = NULL;
13831 result.frame_number = req->frame_number;
13832 result.num_output_buffers = req->mPendingBufferList.size();
13833 result.output_buffers = pStream_Buf;
13834
13835 size_t index = 0;
13836 for (auto info = req->mPendingBufferList.begin();
13837 info != req->mPendingBufferList.end(); ) {
13838
13839 camera3_notify_msg_t notify_msg;
13840 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13841 notify_msg.type = CAMERA3_MSG_ERROR;
13842 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13843 notify_msg.message.error.error_stream = info->stream;
13844 notify_msg.message.error.frame_number = req->frame_number;
13845 pStream_Buf[index].acquire_fence = -1;
13846 pStream_Buf[index].release_fence = -1;
13847 pStream_Buf[index].buffer = info->buffer;
13848 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13849 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013850 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013851 index++;
13852 // Remove buffer from list
13853 info = req->mPendingBufferList.erase(info);
13854 }
13855
13856 // Remove this request from Map
13857 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13858 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13859 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13860
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013861 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013862
13863 delete [] pStream_Buf;
13864 } else {
13865
13866 // Go through the pending requests info and send error request to framework
13867 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13868
13869 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13870
13871 // Send error notify to frameworks
13872 camera3_notify_msg_t notify_msg;
13873 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13874 notify_msg.type = CAMERA3_MSG_ERROR;
13875 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13876 notify_msg.message.error.error_stream = NULL;
13877 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013878 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013879
13880 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13881 if (NULL == pStream_Buf) {
13882 LOGE("No memory for pending buffers array");
13883 return NO_MEMORY;
13884 }
13885 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13886
13887 result.result = NULL;
13888 result.frame_number = req->frame_number;
13889 result.input_buffer = i->input_buffer;
13890 result.num_output_buffers = req->mPendingBufferList.size();
13891 result.output_buffers = pStream_Buf;
13892
13893 size_t index = 0;
13894 for (auto info = req->mPendingBufferList.begin();
13895 info != req->mPendingBufferList.end(); ) {
13896 pStream_Buf[index].acquire_fence = -1;
13897 pStream_Buf[index].release_fence = -1;
13898 pStream_Buf[index].buffer = info->buffer;
13899 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13900 pStream_Buf[index].stream = info->stream;
13901 index++;
13902 // Remove buffer from list
13903 info = req->mPendingBufferList.erase(info);
13904 }
13905
13906 // Remove this request from Map
13907 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13908 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13909 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13910
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013911 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013912 delete [] pStream_Buf;
13913 i = erasePendingRequest(i);
13914 }
13915 }
13916
13917 /* Reset pending frame Drop list and requests list */
13918 mPendingFrameDropList.clear();
13919
13920 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13921 req.mPendingBufferList.clear();
13922 }
13923 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013924 LOGH("Cleared all the pending buffers ");
13925
13926 return rc;
13927}
13928
13929bool QCamera3HardwareInterface::isOnEncoder(
13930 const cam_dimension_t max_viewfinder_size,
13931 uint32_t width, uint32_t height)
13932{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013933 return ((width > (uint32_t)max_viewfinder_size.width) ||
13934 (height > (uint32_t)max_viewfinder_size.height) ||
13935 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13936 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013937}
13938
13939/*===========================================================================
13940 * FUNCTION : setBundleInfo
13941 *
13942 * DESCRIPTION: Set bundle info for all streams that are bundle.
13943 *
13944 * PARAMETERS : None
13945 *
13946 * RETURN : NO_ERROR on success
13947 * Error codes on failure
13948 *==========================================================================*/
13949int32_t QCamera3HardwareInterface::setBundleInfo()
13950{
13951 int32_t rc = NO_ERROR;
13952
13953 if (mChannelHandle) {
13954 cam_bundle_config_t bundleInfo;
13955 memset(&bundleInfo, 0, sizeof(bundleInfo));
13956 rc = mCameraHandle->ops->get_bundle_info(
13957 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13958 if (rc != NO_ERROR) {
13959 LOGE("get_bundle_info failed");
13960 return rc;
13961 }
13962 if (mAnalysisChannel) {
13963 mAnalysisChannel->setBundleInfo(bundleInfo);
13964 }
13965 if (mSupportChannel) {
13966 mSupportChannel->setBundleInfo(bundleInfo);
13967 }
13968 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13969 it != mStreamInfo.end(); it++) {
13970 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13971 channel->setBundleInfo(bundleInfo);
13972 }
13973 if (mRawDumpChannel) {
13974 mRawDumpChannel->setBundleInfo(bundleInfo);
13975 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013976 if (mHdrPlusRawSrcChannel) {
13977 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13978 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013979 }
13980
13981 return rc;
13982}
13983
13984/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013985 * FUNCTION : setInstantAEC
13986 *
13987 * DESCRIPTION: Set Instant AEC related params.
13988 *
13989 * PARAMETERS :
13990 * @meta: CameraMetadata reference
13991 *
13992 * RETURN : NO_ERROR on success
13993 * Error codes on failure
13994 *==========================================================================*/
13995int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
13996{
13997 int32_t rc = NO_ERROR;
13998 uint8_t val = 0;
13999 char prop[PROPERTY_VALUE_MAX];
14000
14001 // First try to configure instant AEC from framework metadata
14002 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14003 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14004 }
14005
14006 // If framework did not set this value, try to read from set prop.
14007 if (val == 0) {
14008 memset(prop, 0, sizeof(prop));
14009 property_get("persist.camera.instant.aec", prop, "0");
14010 val = (uint8_t)atoi(prop);
14011 }
14012
14013 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14014 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14015 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14016 mInstantAEC = val;
14017 mInstantAECSettledFrameNumber = 0;
14018 mInstantAecFrameIdxCount = 0;
14019 LOGH("instantAEC value set %d",val);
14020 if (mInstantAEC) {
14021 memset(prop, 0, sizeof(prop));
14022 property_get("persist.camera.ae.instant.bound", prop, "10");
14023 int32_t aec_frame_skip_cnt = atoi(prop);
14024 if (aec_frame_skip_cnt >= 0) {
14025 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14026 } else {
14027 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14028 rc = BAD_VALUE;
14029 }
14030 }
14031 } else {
14032 LOGE("Bad instant aec value set %d", val);
14033 rc = BAD_VALUE;
14034 }
14035 return rc;
14036}
14037
14038/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014039 * FUNCTION : get_num_overall_buffers
14040 *
14041 * DESCRIPTION: Estimate number of pending buffers across all requests.
14042 *
14043 * PARAMETERS : None
14044 *
14045 * RETURN : Number of overall pending buffers
14046 *
14047 *==========================================================================*/
14048uint32_t PendingBuffersMap::get_num_overall_buffers()
14049{
14050 uint32_t sum_buffers = 0;
14051 for (auto &req : mPendingBuffersInRequest) {
14052 sum_buffers += req.mPendingBufferList.size();
14053 }
14054 return sum_buffers;
14055}
14056
14057/*===========================================================================
14058 * FUNCTION : removeBuf
14059 *
14060 * DESCRIPTION: Remove a matching buffer from tracker.
14061 *
14062 * PARAMETERS : @buffer: image buffer for the callback
14063 *
14064 * RETURN : None
14065 *
14066 *==========================================================================*/
14067void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14068{
14069 bool buffer_found = false;
14070 for (auto req = mPendingBuffersInRequest.begin();
14071 req != mPendingBuffersInRequest.end(); req++) {
14072 for (auto k = req->mPendingBufferList.begin();
14073 k != req->mPendingBufferList.end(); k++ ) {
14074 if (k->buffer == buffer) {
14075 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14076 req->frame_number, buffer);
14077 k = req->mPendingBufferList.erase(k);
14078 if (req->mPendingBufferList.empty()) {
14079 // Remove this request from Map
14080 req = mPendingBuffersInRequest.erase(req);
14081 }
14082 buffer_found = true;
14083 break;
14084 }
14085 }
14086 if (buffer_found) {
14087 break;
14088 }
14089 }
14090 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14091 get_num_overall_buffers());
14092}
14093
14094/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014095 * FUNCTION : getBufErrStatus
14096 *
14097 * DESCRIPTION: get buffer error status
14098 *
14099 * PARAMETERS : @buffer: buffer handle
14100 *
14101 * RETURN : Error status
14102 *
14103 *==========================================================================*/
14104int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14105{
14106 for (auto& req : mPendingBuffersInRequest) {
14107 for (auto& k : req.mPendingBufferList) {
14108 if (k.buffer == buffer)
14109 return k.bufStatus;
14110 }
14111 }
14112 return CAMERA3_BUFFER_STATUS_OK;
14113}
14114
14115/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014116 * FUNCTION : setPAAFSupport
14117 *
14118 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14119 * feature mask according to stream type and filter
14120 * arrangement
14121 *
14122 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14123 * @stream_type: stream type
14124 * @filter_arrangement: filter arrangement
14125 *
14126 * RETURN : None
14127 *==========================================================================*/
14128void QCamera3HardwareInterface::setPAAFSupport(
14129 cam_feature_mask_t& feature_mask,
14130 cam_stream_type_t stream_type,
14131 cam_color_filter_arrangement_t filter_arrangement)
14132{
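    // For Bayer sensors, enable PAAF on preview, analysis and video streams
    // (unless QTI PPEIS is enabled on that stream); for mono (Y-only) sensors,
    // enable it only on the analysis stream.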
Thierry Strudel3d639192016-09-09 11:52:26 -070014133 switch (filter_arrangement) {
14134 case CAM_FILTER_ARRANGEMENT_RGGB:
14135 case CAM_FILTER_ARRANGEMENT_GRBG:
14136 case CAM_FILTER_ARRANGEMENT_GBRG:
14137 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014138 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14139 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014140 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014141 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14142 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014143 }
14144 break;
14145 case CAM_FILTER_ARRANGEMENT_Y:
14146 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14147 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14148 }
14149 break;
14150 default:
14151 break;
14152 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014153 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14154 feature_mask, stream_type, filter_arrangement);
14155
14156
Thierry Strudel3d639192016-09-09 11:52:26 -070014157}
14158
14159/*===========================================================================
14160* FUNCTION : getSensorMountAngle
14161*
14162* DESCRIPTION: Retrieve sensor mount angle
14163*
14164* PARAMETERS : None
14165*
14166* RETURN : sensor mount angle in uint32_t
14167*==========================================================================*/
14168uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14169{
14170 return gCamCapability[mCameraId]->sensor_mount_angle;
14171}
14172
14173/*===========================================================================
14174* FUNCTION : getRelatedCalibrationData
14175*
14176* DESCRIPTION: Retrieve related system calibration data
14177*
14178* PARAMETERS : None
14179*
14180* RETURN : Pointer of related system calibration data
14181*==========================================================================*/
14182const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14183{
14184 return (const cam_related_system_calibration_data_t *)
14185 &(gCamCapability[mCameraId]->related_cam_calibration);
14186}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014187
14188/*===========================================================================
14189 * FUNCTION : is60HzZone
14190 *
14191 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14192 *
14193 * PARAMETERS : None
14194 *
14195 * RETURN : True if in 60Hz zone, False otherwise
14196 *==========================================================================*/
14197bool QCamera3HardwareInterface::is60HzZone()
14198{
14199 time_t t = time(NULL);
14200 struct tm lt;
14201
14202 struct tm* r = localtime_r(&t, &lt);
14203
14204 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
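    // Heuristic: UTC offsets at or below -2h (the Americas) or at or above +8h
    // (east Asia / Pacific) are treated as 60Hz regions; offsets in between map
    // to 50Hz. If local time cannot be obtained, 60Hz is assumed.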
14205 return true;
14206 else
14207 return false;
14208}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014209
14210/*===========================================================================
14211 * FUNCTION : adjustBlackLevelForCFA
14212 *
14213 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14214 * of bayer CFA (Color Filter Array).
14215 *
14216 * PARAMETERS : @input: black level pattern in the order of RGGB
14217 * @output: black level pattern in the order of CFA
14218 * @color_arrangement: CFA color arrangement
14219 *
14220 * RETURN : None
14221 *==========================================================================*/
14222template<typename T>
14223void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14224 T input[BLACK_LEVEL_PATTERN_CNT],
14225 T output[BLACK_LEVEL_PATTERN_CNT],
14226 cam_color_filter_arrangement_t color_arrangement)
14227{
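    // input[] is ordered {R, Gr, Gb, B}; output[] is reordered so that index 0
    // matches the first pixel of the sensor's CFA pattern.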
14228 switch (color_arrangement) {
14229 case CAM_FILTER_ARRANGEMENT_GRBG:
14230 output[0] = input[1];
14231 output[1] = input[0];
14232 output[2] = input[3];
14233 output[3] = input[2];
14234 break;
14235 case CAM_FILTER_ARRANGEMENT_GBRG:
14236 output[0] = input[2];
14237 output[1] = input[3];
14238 output[2] = input[0];
14239 output[3] = input[1];
14240 break;
14241 case CAM_FILTER_ARRANGEMENT_BGGR:
14242 output[0] = input[3];
14243 output[1] = input[2];
14244 output[2] = input[1];
14245 output[3] = input[0];
14246 break;
14247 case CAM_FILTER_ARRANGEMENT_RGGB:
14248 output[0] = input[0];
14249 output[1] = input[1];
14250 output[2] = input[2];
14251 output[3] = input[3];
14252 break;
14253 default:
14254 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14255 break;
14256 }
14257}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014258
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014259void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14260 CameraMetadata &resultMetadata,
14261 std::shared_ptr<metadata_buffer_t> settings)
14262{
14263 if (settings == nullptr) {
14264 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14265 return;
14266 }
14267
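    // Carry the JPEG-related settings and capture intent of the HDR+ still
    // capture request over into the result metadata, which comes from a ZSL buffer.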
14268 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14269 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14270 }
14271
14272 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14273 String8 str((const char *)gps_methods);
14274 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14275 }
14276
14277 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14278 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14279 }
14280
14281 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14282 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14283 }
14284
14285 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14286 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14287 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14288 }
14289
14290 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14291 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14292 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14293 }
14294
14295 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14296 int32_t fwk_thumb_size[2];
14297 fwk_thumb_size[0] = thumb_size->width;
14298 fwk_thumb_size[1] = thumb_size->height;
14299 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14300 }
14301
14302 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14303 uint8_t fwk_intent = intent[0];
14304 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14305 }
14306}
14307
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014308bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14309 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14310 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014311{
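    // A request qualifies for HDR+ only when noise reduction and edge modes are
    // both HIGH_QUALITY and the request has exactly one JPEG (BLOB) output buffer.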
14312 if (hdrPlusRequest == nullptr) return false;
14313
14314 // Check noise reduction mode is high quality.
14315 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14316 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14317 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014318 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14319 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014320 return false;
14321 }
14322
14323 // Check edge mode is high quality.
14324 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14325 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14326 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14327 return false;
14328 }
14329
14330 if (request.num_output_buffers != 1 ||
14331 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14332 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014333 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14334 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14335                    request.output_buffers[i].stream->width,
14336                    request.output_buffers[i].stream->height,
14337                    request.output_buffers[i].stream->format);
14338 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014339 return false;
14340 }
14341
14342 // Get a YUV buffer from pic channel.
14343 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14344 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14345 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14346 if (res != OK) {
14347 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14348 __FUNCTION__, strerror(-res), res);
14349 return false;
14350 }
14351
14352 pbcamera::StreamBuffer buffer;
14353 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014354 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014355 buffer.data = yuvBuffer->buffer;
14356 buffer.dataSize = yuvBuffer->frame_len;
14357
14358 pbcamera::CaptureRequest pbRequest;
14359 pbRequest.id = request.frame_number;
14360 pbRequest.outputBuffers.push_back(buffer);
14361
14362 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014363 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014364 if (res != OK) {
14365 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14366 strerror(-res), res);
14367 return false;
14368 }
14369
14370 hdrPlusRequest->yuvBuffer = yuvBuffer;
14371 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14372
14373 return true;
14374}
14375
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014376status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14377 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14378 return OK;
14379 }
14380
14381 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14382 if (res != OK) {
14383 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14384 strerror(-res), res);
14385 return res;
14386 }
14387 gHdrPlusClientOpening = true;
14388
14389 return OK;
14390}
14391
Chien-Yu Chenee335912017-02-09 17:53:20 -080014392status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14393{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014394 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014395
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014396 // Check if gHdrPlusClient is opened or being opened.
14397 if (gHdrPlusClient == nullptr) {
14398 if (gHdrPlusClientOpening) {
14399 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14400 return OK;
14401 }
14402
14403 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014404 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014405 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14406 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014407 return res;
14408 }
14409
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014410 // When opening HDR+ client completes, HDR+ mode will be enabled.
14411 return OK;
14412
Chien-Yu Chenee335912017-02-09 17:53:20 -080014413 }
14414
14415 // Configure stream for HDR+.
14416 res = configureHdrPlusStreamsLocked();
14417 if (res != OK) {
14418 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014419 return res;
14420 }
14421
14422 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14423 res = gHdrPlusClient->setZslHdrPlusMode(true);
14424 if (res != OK) {
14425 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014426 return res;
14427 }
14428
14429 mHdrPlusModeEnabled = true;
14430 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14431
14432 return OK;
14433}
14434
14435void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14436{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014437 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014438 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014439 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14440 if (res != OK) {
14441 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14442 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014443 }
14444
14445 mHdrPlusModeEnabled = false;
14446 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14447}
14448
14449status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014450{
14451 pbcamera::InputConfiguration inputConfig;
14452 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14453 status_t res = OK;
14454
14455 // Configure HDR+ client streams.
14456 // Get input config.
14457 if (mHdrPlusRawSrcChannel) {
14458 // HDR+ input buffers will be provided by HAL.
14459 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14460 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14461 if (res != OK) {
14462            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14463 __FUNCTION__, strerror(-res), res);
14464 return res;
14465 }
14466
14467 inputConfig.isSensorInput = false;
14468 } else {
14469 // Sensor MIPI will send data to Easel.
14470 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014471 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014472 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14473 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14474 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14475 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14476 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14477 if (mSensorModeInfo.num_raw_bits != 10) {
14478 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14479 mSensorModeInfo.num_raw_bits);
14480 return BAD_VALUE;
14481 }
14482
14483 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014484 }
14485
14486 // Get output configurations.
14487 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014488 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014489
14490 // Easel may need to output YUV output buffers if mPictureChannel was created.
14491 pbcamera::StreamConfiguration yuvOutputConfig;
14492 if (mPictureChannel != nullptr) {
14493 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14494 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14495 if (res != OK) {
14496            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14497 __FUNCTION__, strerror(-res), res);
14498
14499 return res;
14500 }
14501
14502 outputStreamConfigs.push_back(yuvOutputConfig);
14503 }
14504
14505 // TODO: consider other channels for YUV output buffers.
14506
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014507 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014508 if (res != OK) {
14509        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14510 strerror(-res), res);
14511 return res;
14512 }
14513
14514 return OK;
14515}
14516
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014517void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14518 if (client == nullptr) {
14519 ALOGE("%s: Opened client is null.", __FUNCTION__);
14520 return;
14521 }
14522
14523 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14524
14525 Mutex::Autolock l(gHdrPlusClientLock);
14526 gHdrPlusClient = std::move(client);
14527 gHdrPlusClientOpening = false;
14528
14529 // Set static metadata.
14530 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14531 if (res != OK) {
14532 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14533 __FUNCTION__, strerror(-res), res);
14534 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14535 gHdrPlusClient = nullptr;
14536 return;
14537 }
14538
14539 // Enable HDR+ mode.
14540 res = enableHdrPlusModeLocked();
14541 if (res != OK) {
14542 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14543 }
14544}
14545
14546void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14547 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14548 Mutex::Autolock l(gHdrPlusClientLock);
14549 gHdrPlusClientOpening = false;
14550}
14551
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014552void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14553 const camera_metadata_t &resultMetadata) {
14554 if (result != nullptr) {
14555 if (result->outputBuffers.size() != 1) {
14556 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14557 result->outputBuffers.size());
14558 return;
14559 }
14560
14561 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14562 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14563 result->outputBuffers[0].streamId);
14564 return;
14565 }
14566
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014567 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014568 HdrPlusPendingRequest pendingRequest;
14569 {
14570 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14571 auto req = mHdrPlusPendingRequests.find(result->requestId);
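            // The request id is expected to be present in mHdrPlusPendingRequests;
            // no end() check is done here.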
14572 pendingRequest = req->second;
14573 }
14574
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014575 // Update the result metadata with the settings of the HDR+ still capture request because
14576 // the result metadata belongs to a ZSL buffer.
14577 CameraMetadata metadata;
14578 metadata = &resultMetadata;
14579 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14580 camera_metadata_t* updatedResultMetadata = metadata.release();
14581
14582 QCamera3PicChannel *picChannel =
14583 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14584
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014585 // Check if dumping HDR+ YUV output is enabled.
14586 char prop[PROPERTY_VALUE_MAX];
14587 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14588 bool dumpYuvOutput = atoi(prop);
14589
14590 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014591 // Dump yuv buffer to a ppm file.
14592 pbcamera::StreamConfiguration outputConfig;
14593 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14594 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14595 if (rc == OK) {
14596 char buf[FILENAME_MAX] = {};
14597 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14598 result->requestId, result->outputBuffers[0].streamId,
14599 outputConfig.image.width, outputConfig.image.height);
14600
14601 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14602 } else {
14603 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14604 __FUNCTION__, strerror(-rc), rc);
14605 }
14606 }
14607
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014608 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14609 auto halMetadata = std::make_shared<metadata_buffer_t>();
14610 clear_metadata_buffer(halMetadata.get());
14611
14612 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14613 // encoding.
14614 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14615 halStreamId, /*minFrameDuration*/0);
14616 if (res == OK) {
14617 // Return the buffer to pic channel for encoding.
14618 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14619 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14620 halMetadata);
14621 } else {
14622 // Return the buffer without encoding.
14623 // TODO: This should not happen but we may want to report an error buffer to camera
14624 // service.
14625 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14626 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14627 strerror(-res), res);
14628 }
14629
14630 // Send HDR+ metadata to framework.
14631 {
14632 pthread_mutex_lock(&mMutex);
14633
14634 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14635 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14636 pthread_mutex_unlock(&mMutex);
14637 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014638
14639 // Remove the HDR+ pending request.
14640 {
14641 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14642 auto req = mHdrPlusPendingRequests.find(result->requestId);
14643 mHdrPlusPendingRequests.erase(req);
14644 }
14645 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014646}
14647
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014648void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14649 // TODO: Handle HDR+ capture failures and send the failure to framework.
14650 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14651 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14652
14653 // Return the buffer to pic channel.
14654 QCamera3PicChannel *picChannel =
14655 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14656 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14657
14658 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014659}
14660
Thierry Strudel3d639192016-09-09 11:52:26 -070014661}; //end namespace qcamera