/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY     0

#define MAX_VALUE_8BIT  ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH  3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS       1
#define MAX_STALLING_STREAMS  1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers (in seconds)
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT              0
#define FACE_TOP               1
#define FACE_RIGHT             2
#define FACE_BOTTOM            3
#define FACE_WEIGHT            4

/* Face landmarks indices */
#define LEFT_EYE_X             0
#define LEFT_EYE_Y             1
#define RIGHT_EYE_X            2
#define RIGHT_EYE_Y            3
#define MOUTH_X                4
#define MOUTH_Y                5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

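// Note: ANDROID_CONTROL_AF_MODE_OFF intentionally appears twice in the map below,
// so that both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED translate to AF_MODE_OFF
// when mapping HAL focus modes back to the Android enum.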
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

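// JPEG thumbnail sizes, listed as flat (width, height) pairs; the leading
// (0, 0) entry indicates that thumbnail generation can be disabled.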
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,                     CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,             CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,              CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,                     CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,                 CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android, the
 * lookup traverses from lower to higher index, so for HAL values that map to multiple
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

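// camera3_device_ops vtable exposed to the camera framework. register_stream_buffers
// and get_metadata_vendor_tag_ops are deprecated as of camera HAL v3.2 and are
// intentionally left NULL here.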
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

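/*===========================================================================
 * FUNCTION   : logEaselEvent
 *
 * DESCRIPTION: When Easel profiling is enabled, log the given tag/event pair
 *              together with the current CLOCK_BOOTTIME timestamp in ms.
 *
 * PARAMETERS :
 *   @tag   : log tag for the event
 *   @event : description of the event being logged
 *
 * RETURN     : none
 *==========================================================================*/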
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : callbacks to the camera framework
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that. Also, we have logic to override to full array
                 * size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001299/*===========================================================================
1300 * FUNCTION : validateUsageFlags
1301 *
1302 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1303 *
1304 * PARAMETERS :
1305 * @stream_list : streams to be configured
1306 *
1307 * RETURN :
1308 * NO_ERROR if the usage flags are supported
1309 * error code if usage flags are not supported
1310 *
1311 *==========================================================================*/
1312int QCamera3HardwareInterface::validateUsageFlags(
1313 const camera3_stream_configuration_t* streamList)
1314{
1315 for (size_t j = 0; j < streamList->num_streams; j++) {
1316 const camera3_stream_t *newStream = streamList->streams[j];
1317
1318 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1319 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1320 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1321 continue;
1322 }
1323
1324 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1325 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1326 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1327 bool forcePreviewUBWC = true;
1328 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1329 forcePreviewUBWC = false;
1330 }
1331 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1332 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1333 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1334 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1335 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1336 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1337
1338 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1339 // So color spaces will always match.
1340
1341 // Check whether underlying formats of shared streams match.
1342 if (isVideo && isPreview && videoFormat != previewFormat) {
1343 LOGE("Combined video and preview usage flag is not supported");
1344 return -EINVAL;
1345 }
1346 if (isPreview && isZSL && previewFormat != zslFormat) {
1347 LOGE("Combined preview and zsl usage flag is not supported");
1348 return -EINVAL;
1349 }
1350 if (isVideo && isZSL && videoFormat != zslFormat) {
1351 LOGE("Combined video and zsl usage flag is not supported");
1352 return -EINVAL;
1353 }
1354 }
1355 return NO_ERROR;
1356}
1357
1358/*===========================================================================
1359 * FUNCTION : validateUsageFlagsForEis
1360 *
1361 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1362 *
1363 * PARAMETERS :
1364 * @stream_list : streams to be configured
1365 *
1366 * RETURN :
1367 * NO_ERROR if the usage flags are supported
1368 * error code if usage flags are not supported
1369 *
1370 *==========================================================================*/
1371int QCamera3HardwareInterface::validateUsageFlagsForEis(
1372 const camera3_stream_configuration_t* streamList)
1373{
1374 for (size_t j = 0; j < streamList->num_streams; j++) {
1375 const camera3_stream_t *newStream = streamList->streams[j];
1376
1377 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1378 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1379
1380        // Because EIS is "hard-coded" for certain use cases, and the current
1381 // implementation doesn't support shared preview and video on the same
1382 // stream, return failure if EIS is forced on.
1383 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1384 LOGE("Combined video and preview usage flag is not supported due to EIS");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
Thierry Strudel3d639192016-09-09 11:52:26 -07001391/*==============================================================================
1392 * FUNCTION : isSupportChannelNeeded
1393 *
1394 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 * @stream_config_info : the config info for streams to be configured
1399 *
1400 * RETURN     : Boolean true/false decision
1401 *
1402 *==========================================================================*/
1403bool QCamera3HardwareInterface::isSupportChannelNeeded(
1404 camera3_stream_configuration_t *streamList,
1405 cam_stream_size_info_t stream_config_info)
1406{
1407 uint32_t i;
1408 bool pprocRequested = false;
1409 /* Check for conditions where PProc pipeline does not have any streams*/
1410 for (i = 0; i < stream_config_info.num_streams; i++) {
1411 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1412 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1413 pprocRequested = true;
1414 break;
1415 }
1416 }
1417
1418    if (pprocRequested == false)
1419 return true;
1420
1421 /* Dummy stream needed if only raw or jpeg streams present */
1422 for (i = 0; i < streamList->num_streams; i++) {
1423 switch(streamList->streams[i]->format) {
1424 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1425 case HAL_PIXEL_FORMAT_RAW10:
1426 case HAL_PIXEL_FORMAT_RAW16:
1427 case HAL_PIXEL_FORMAT_BLOB:
1428 break;
1429 default:
1430 return false;
1431 }
1432 }
1433 return true;
1434}
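
/*
 * Illustrative sketch (hypothetical, not part of this HAL build): a RAW-only
 * configuration for which isSupportChannelNeeded() returns true, because no
 * stream exercises the PProc pipeline (all postprocess masks are NONE), so a
 * dummy support stream is required to keep the pipeline topology valid.
 *
 *   camera3_stream_t rawStream = {};
 *   rawStream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   rawStream.format      = HAL_PIXEL_FORMAT_RAW16;
 *   camera3_stream_t *streams[] = { &rawStream };
 *
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams = 1;
 *   config.streams     = streams;
 *
 *   cam_stream_size_info_t info = {};
 *   info.num_streams         = 1;
 *   info.type[0]             = CAM_STREAM_TYPE_RAW;
 *   info.postprocess_mask[0] = CAM_QCOM_FEATURE_NONE;
 *   // isSupportChannelNeeded(&config, info) -> true
 */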
1435
1436/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001437 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001438 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001439 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001440 *
1441 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
1444 * RETURN : int32_t type of status
1445 * NO_ERROR -- success
1446 *              non-zero failure code
1447 *
1448 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001449int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001450{
1451 int32_t rc = NO_ERROR;
1452
1453 cam_dimension_t max_dim = {0, 0};
1454 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1455 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1456 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1457 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1458 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1459 }
1460
1461 clear_metadata_buffer(mParameters);
1462
1463 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1464 max_dim);
1465 if (rc != NO_ERROR) {
1466 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1467 return rc;
1468 }
1469
1470 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1471 if (rc != NO_ERROR) {
1472 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1473 return rc;
1474 }
1475
1476 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001477 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001478
1479 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1480 mParameters);
1481 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001483 return rc;
1484 }
1485
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001487 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1488 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1489 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1490 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1491 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001492
1493 return rc;
1494}
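
/*
 * Illustrative sketch (hypothetical, not part of this HAL build): one way a
 * caller could use the reported mode, deriving a rough frame-duration floor
 * from the output pixel clock. Real sensor timing also depends on line/frame
 * blanking, which is deliberately ignored here.
 *
 *   cam_sensor_mode_info_t mode = {};
 *   if (getSensorModeInfo(mode) == NO_ERROR && mode.op_pixel_clk > 0) {
 *       uint64_t activePixels = (uint64_t)mode.pixel_array_size.width *
 *                               (uint64_t)mode.pixel_array_size.height;
 *       uint64_t approxMinFrameDurationNs =
 *               activePixels * 1000000000ULL / mode.op_pixel_clk;
 *       (void)approxMinFrameDurationNs; // e.g. sanity-check requested FPS ranges
 *   }
 */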
1495
1496/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 * FUNCTION : addToPPFeatureMask
1498 *
1499 * DESCRIPTION: add additional features to pp feature mask based on
1500 *              stream type and use case
1501 *
1502 * PARAMETERS :
1503 * @stream_format : stream type for feature mask
1504 * @stream_idx : stream idx within postprocess_mask list to change
1505 *
1506 * RETURN     : None
1507 *
1508 *==========================================================================*/
1509void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1510 uint32_t stream_idx)
1511{
1512 char feature_mask_value[PROPERTY_VALUE_MAX];
1513 cam_feature_mask_t feature_mask;
1514 int args_converted;
1515 int property_len;
1516
1517 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001518#ifdef _LE_CAMERA_
1519 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1520 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1521 property_len = property_get("persist.camera.hal3.feature",
1522 feature_mask_value, swtnr_feature_mask_value);
1523#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001526#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1528 (feature_mask_value[1] == 'x')) {
1529 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1530 } else {
1531 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1532 }
1533 if (1 != args_converted) {
1534 feature_mask = 0;
1535 LOGE("Wrong feature mask %s", feature_mask_value);
1536 return;
1537 }
1538
1539 switch (stream_format) {
1540 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1541 /* Add LLVD to pp feature mask only if video hint is enabled */
1542 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx]
1544 |= CAM_QTI_FEATURE_SW_TNR;
1545 LOGH("Added SW TNR to pp feature mask");
1546 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx]
1548 |= CAM_QCOM_FEATURE_LLVD;
1549 LOGH("Added LLVD SeeMore to pp feature mask");
1550 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001551 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1552 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1553 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1554 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001555 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1556 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1557 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1558 CAM_QTI_FEATURE_BINNING_CORRECTION;
1559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001560 break;
1561 }
1562 default:
1563 break;
1564 }
1565 LOGD("PP feature mask %llx",
1566 mStreamConfigInfo.postprocess_mask[stream_idx]);
1567}
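
/*
 * Illustrative usage note (hypothetical mask values): the property parsed in
 * addToPPFeatureMask() accepts either a "0x"-prefixed hex mask or a plain
 * decimal value, e.g.
 *
 *   adb shell setprop persist.camera.hal3.feature 0x2000    # hex form
 *   adb shell setprop persist.camera.hal3.feature 8192      # same bits, decimal
 *
 * The parsed mask only gates which optional features (SW TNR vs. LLVD SeeMore)
 * get OR-ed into the per-stream postprocess mask for IMPLEMENTATION_DEFINED
 * streams when the video hint is set; it does not add arbitrary bits directly.
 */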
1568
1569/*==============================================================================
1570 * FUNCTION : updateFpsInPreviewBuffer
1571 *
1572 * DESCRIPTION: update FPS information in preview buffer.
1573 *
1574 * PARAMETERS :
1575 * @metadata : pointer to metadata buffer
1576 * @frame_number: frame_number to look for in pending buffer list
1577 *
1578 * RETURN : None
1579 *
1580 *==========================================================================*/
1581void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1582 uint32_t frame_number)
1583{
1584 // Mark all pending buffers for this particular request
1585 // with corresponding framerate information
1586 for (List<PendingBuffersInRequest>::iterator req =
1587 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1588 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1589 for(List<PendingBufferInfo>::iterator j =
1590 req->mPendingBufferList.begin();
1591 j != req->mPendingBufferList.end(); j++) {
1592 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1593 if ((req->frame_number == frame_number) &&
1594 (channel->getStreamTypeMask() &
1595 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1596 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1597 CAM_INTF_PARM_FPS_RANGE, metadata) {
1598 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1599 struct private_handle_t *priv_handle =
1600 (struct private_handle_t *)(*(j->buffer));
1601 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1602 }
1603 }
1604 }
1605 }
1606}
1607
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001608/*==============================================================================
1609 * FUNCTION : updateTimeStampInPendingBuffers
1610 *
1611 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1612 * of a frame number
1613 *
1614 * PARAMETERS :
1615 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1616 * @timestamp : timestamp to be set
1617 *
1618 * RETURN : None
1619 *
1620 *==========================================================================*/
1621void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1622 uint32_t frameNumber, nsecs_t timestamp)
1623{
1624 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1625 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1626 if (req->frame_number != frameNumber)
1627 continue;
1628
1629 for (auto k = req->mPendingBufferList.begin();
1630 k != req->mPendingBufferList.end(); k++ ) {
1631 struct private_handle_t *priv_handle =
1632 (struct private_handle_t *) (*(k->buffer));
1633 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1634 }
1635 }
1636 return;
1637}
1638
Thierry Strudel3d639192016-09-09 11:52:26 -07001639/*===========================================================================
1640 * FUNCTION : configureStreams
1641 *
1642 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1643 * and output streams.
1644 *
1645 * PARAMETERS :
1646 * @stream_list : streams to be configured
1647 *
1648 * RETURN :
1649 *
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::configureStreams(
1652 camera3_stream_configuration_t *streamList)
1653{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001654 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001655 int rc = 0;
1656
1657 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001658 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001659 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001660 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001661
1662 return rc;
1663}
1664
1665/*===========================================================================
1666 * FUNCTION : configureStreamsPerfLocked
1667 *
1668 * DESCRIPTION: configureStreams while perfLock is held.
1669 *
1670 * PARAMETERS :
1671 * @stream_list : streams to be configured
1672 *
1673 * RETURN : int32_t type of status
1674 * NO_ERROR -- success
1675 *              non-zero failure code
1676 *==========================================================================*/
1677int QCamera3HardwareInterface::configureStreamsPerfLocked(
1678 camera3_stream_configuration_t *streamList)
1679{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001680 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681 int rc = 0;
1682
1683 // Sanity check stream_list
1684 if (streamList == NULL) {
1685 LOGE("NULL stream configuration");
1686 return BAD_VALUE;
1687 }
1688 if (streamList->streams == NULL) {
1689 LOGE("NULL stream list");
1690 return BAD_VALUE;
1691 }
1692
1693 if (streamList->num_streams < 1) {
1694 LOGE("Bad number of streams requested: %d",
1695 streamList->num_streams);
1696 return BAD_VALUE;
1697 }
1698
1699 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1700 LOGE("Maximum number of streams %d exceeded: %d",
1701 MAX_NUM_STREAMS, streamList->num_streams);
1702 return BAD_VALUE;
1703 }
1704
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001705 rc = validateUsageFlags(streamList);
1706 if (rc != NO_ERROR) {
1707 return rc;
1708 }
1709
Thierry Strudel3d639192016-09-09 11:52:26 -07001710 mOpMode = streamList->operation_mode;
1711 LOGD("mOpMode: %d", mOpMode);
1712
1713    /* first invalidate all the streams in mStreamInfo
1714 * if they appear again, they will be validated */
1715 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1716 it != mStreamInfo.end(); it++) {
1717 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1718 if (channel) {
1719 channel->stop();
1720 }
1721 (*it)->status = INVALID;
1722 }
1723
1724 if (mRawDumpChannel) {
1725 mRawDumpChannel->stop();
1726 delete mRawDumpChannel;
1727 mRawDumpChannel = NULL;
1728 }
1729
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001730 if (mHdrPlusRawSrcChannel) {
1731 mHdrPlusRawSrcChannel->stop();
1732 delete mHdrPlusRawSrcChannel;
1733 mHdrPlusRawSrcChannel = NULL;
1734 }
1735
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 if (mSupportChannel)
1737 mSupportChannel->stop();
1738
1739 if (mAnalysisChannel) {
1740 mAnalysisChannel->stop();
1741 }
1742 if (mMetadataChannel) {
1743        /* If mStreamInfo is not empty, there is a metadata stream */
1744 mMetadataChannel->stop();
1745 }
1746 if (mChannelHandle) {
1747 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1748 mChannelHandle);
1749 LOGD("stopping channel %d", mChannelHandle);
1750 }
1751
1752 pthread_mutex_lock(&mMutex);
1753
1754 // Check state
1755 switch (mState) {
1756 case INITIALIZED:
1757 case CONFIGURED:
1758 case STARTED:
1759 /* valid state */
1760 break;
1761 default:
1762 LOGE("Invalid state %d", mState);
1763 pthread_mutex_unlock(&mMutex);
1764 return -ENODEV;
1765 }
1766
1767 /* Check whether we have video stream */
1768 m_bIs4KVideo = false;
1769 m_bIsVideo = false;
1770 m_bEisSupportedSize = false;
1771 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001772 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001773 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001774 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001775 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001776 uint32_t videoWidth = 0U;
1777 uint32_t videoHeight = 0U;
1778 size_t rawStreamCnt = 0;
1779 size_t stallStreamCnt = 0;
1780 size_t processedStreamCnt = 0;
1781 // Number of streams on ISP encoder path
1782 size_t numStreamsOnEncoder = 0;
1783 size_t numYuv888OnEncoder = 0;
1784 bool bYuv888OverrideJpeg = false;
1785 cam_dimension_t largeYuv888Size = {0, 0};
1786 cam_dimension_t maxViewfinderSize = {0, 0};
1787 bool bJpegExceeds4K = false;
1788 bool bJpegOnEncoder = false;
1789 bool bUseCommonFeatureMask = false;
1790 cam_feature_mask_t commonFeatureMask = 0;
1791 bool bSmallJpegSize = false;
1792 uint32_t width_ratio;
1793 uint32_t height_ratio;
1794 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1795 camera3_stream_t *inputStream = NULL;
1796 bool isJpeg = false;
1797 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001798 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001799 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001800
1801 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1802
1803 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001804 uint8_t eis_prop_set;
1805 uint32_t maxEisWidth = 0;
1806 uint32_t maxEisHeight = 0;
1807
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001808 // Initialize all instant AEC related variables
1809 mInstantAEC = false;
1810 mResetInstantAEC = false;
1811 mInstantAECSettledFrameNumber = 0;
1812 mAecSkipDisplayFrameBound = 0;
1813 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001814 mCurrFeatureState = 0;
1815 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1818
1819 size_t count = IS_TYPE_MAX;
1820 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1821 for (size_t i = 0; i < count; i++) {
1822 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001823 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1824 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001825 break;
1826 }
1827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001828
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001829 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 maxEisWidth = MAX_EIS_WIDTH;
1831 maxEisHeight = MAX_EIS_HEIGHT;
1832 }
1833
1834 /* EIS setprop control */
1835 char eis_prop[PROPERTY_VALUE_MAX];
1836 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001837 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001838 eis_prop_set = (uint8_t)atoi(eis_prop);
1839
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001840 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1842
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001843 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1844 m_bEisEnable, eis_prop_set, m_bEisSupported);
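
    /*
     * Descriptive summary (no new behavior): m_bEisEnable ends up true only when
     *   - persist.camera.eis.enable is non-zero (it defaults to enabled above),
     *   - the sensor advertises IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0, and
     *   - the session is not CONSTRAINED_HIGH_SPEED (HFR);
     * it is additionally cleared further below for front/front-aux cameras and
     * for configurations without a video stream.
     */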
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001845
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 /* stream configurations */
1847 for (size_t i = 0; i < streamList->num_streams; i++) {
1848 camera3_stream_t *newStream = streamList->streams[i];
1849 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1850 "height = %d, rotation = %d, usage = 0x%x",
1851 i, newStream->stream_type, newStream->format,
1852 newStream->width, newStream->height, newStream->rotation,
1853 newStream->usage);
1854 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1855 newStream->stream_type == CAMERA3_STREAM_INPUT){
1856 isZsl = true;
1857 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001858 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1859 IS_USAGE_PREVIEW(newStream->usage)) {
1860 isPreview = true;
1861 }
1862
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1864 inputStream = newStream;
1865 }
1866
Emilian Peev7650c122017-01-19 08:24:33 -08001867 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1868 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001869 isJpeg = true;
1870 jpegSize.width = newStream->width;
1871 jpegSize.height = newStream->height;
1872 if (newStream->width > VIDEO_4K_WIDTH ||
1873 newStream->height > VIDEO_4K_HEIGHT)
1874 bJpegExceeds4K = true;
1875 }
1876
1877 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1878 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1879 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001880 // In HAL3 we can have multiple different video streams.
1881 // The variables video width and height are used below as
1882 // dimensions of the biggest of them
1883 if (videoWidth < newStream->width ||
1884 videoHeight < newStream->height) {
1885 videoWidth = newStream->width;
1886 videoHeight = newStream->height;
1887 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001888 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1889 (VIDEO_4K_HEIGHT <= newStream->height)) {
1890 m_bIs4KVideo = true;
1891 }
1892 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1893 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001894
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 }
1896 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1897 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1898 switch (newStream->format) {
1899 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001900 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1901 depthPresent = true;
1902 break;
1903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 stallStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 numStreamsOnEncoder++;
1908 bJpegOnEncoder = true;
1909 }
1910 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1911 newStream->width);
1912 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1913                    newStream->height);
1914 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1915 "FATAL: max_downscale_factor cannot be zero and so assert");
1916 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1917 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1918 LOGH("Setting small jpeg size flag to true");
1919 bSmallJpegSize = true;
1920 }
1921 break;
1922 case HAL_PIXEL_FORMAT_RAW10:
1923 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1924 case HAL_PIXEL_FORMAT_RAW16:
1925 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001926 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1927 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1928 pdStatCount++;
1929 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001930 break;
1931 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1932 processedStreamCnt++;
1933 if (isOnEncoder(maxViewfinderSize, newStream->width,
1934 newStream->height)) {
1935 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1936 !IS_USAGE_ZSL(newStream->usage)) {
1937 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1938 }
1939 numStreamsOnEncoder++;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1943 processedStreamCnt++;
1944 if (isOnEncoder(maxViewfinderSize, newStream->width,
1945 newStream->height)) {
1946 // If Yuv888 size is not greater than 4K, set feature mask
1947                // to SUPERSET so that it supports concurrent requests on
1948 // YUV and JPEG.
1949 if (newStream->width <= VIDEO_4K_WIDTH &&
1950 newStream->height <= VIDEO_4K_HEIGHT) {
1951 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1952 }
1953 numStreamsOnEncoder++;
1954 numYuv888OnEncoder++;
1955 largeYuv888Size.width = newStream->width;
1956 largeYuv888Size.height = newStream->height;
1957 }
1958 break;
1959 default:
1960 processedStreamCnt++;
1961 if (isOnEncoder(maxViewfinderSize, newStream->width,
1962 newStream->height)) {
1963 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1964 numStreamsOnEncoder++;
1965 }
1966 break;
1967 }
1968
1969 }
1970 }
1971
1972 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1973 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1974 !m_bIsVideo) {
1975 m_bEisEnable = false;
1976 }
1977
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001978 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1979 pthread_mutex_unlock(&mMutex);
1980 return -EINVAL;
1981 }
1982
Thierry Strudel54dc9782017-02-15 12:12:10 -08001983 uint8_t forceEnableTnr = 0;
1984 char tnr_prop[PROPERTY_VALUE_MAX];
1985 memset(tnr_prop, 0, sizeof(tnr_prop));
1986 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1987 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1988
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 /* Logic to enable/disable TNR based on specific config size/etc.*/
1990 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001991 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1992 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001993 else if (forceEnableTnr)
1994 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001995
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001996 char videoHdrProp[PROPERTY_VALUE_MAX];
1997 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1998 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1999 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2000
2001 if (hdr_mode_prop == 1 && m_bIsVideo &&
2002 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2003 m_bVideoHdrEnabled = true;
2004 else
2005 m_bVideoHdrEnabled = false;
2006
2007
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 /* Check if num_streams is sane */
2009 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2010 rawStreamCnt > MAX_RAW_STREAMS ||
2011 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2012        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2013 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2014 pthread_mutex_unlock(&mMutex);
2015 return -EINVAL;
2016 }
2017 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002018 if (isZsl && m_bIs4KVideo) {
2019 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002020 pthread_mutex_unlock(&mMutex);
2021 return -EINVAL;
2022 }
2023 /* Check if stream sizes are sane */
2024 if (numStreamsOnEncoder > 2) {
2025 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2026 pthread_mutex_unlock(&mMutex);
2027 return -EINVAL;
2028 } else if (1 < numStreamsOnEncoder){
2029 bUseCommonFeatureMask = true;
2030 LOGH("Multiple streams above max viewfinder size, common mask needed");
2031 }
2032
2033 /* Check if BLOB size is greater than 4k in 4k recording case */
2034 if (m_bIs4KVideo && bJpegExceeds4K) {
2035 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2036 pthread_mutex_unlock(&mMutex);
2037 return -EINVAL;
2038 }
2039
Emilian Peev7650c122017-01-19 08:24:33 -08002040 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2041 depthPresent) {
2042 LOGE("HAL doesn't support depth streams in HFR mode!");
2043 pthread_mutex_unlock(&mMutex);
2044 return -EINVAL;
2045 }
2046
Thierry Strudel3d639192016-09-09 11:52:26 -07002047 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2048 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2049 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2050 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2051 // configurations:
2052 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2053 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2054 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2055 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2056 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2057 __func__);
2058 pthread_mutex_unlock(&mMutex);
2059 return -EINVAL;
2060 }
2061
2062 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2063 // the YUV stream's size is greater or equal to the JPEG size, set common
2064 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2065 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2066 jpegSize.width, jpegSize.height) &&
2067 largeYuv888Size.width > jpegSize.width &&
2068 largeYuv888Size.height > jpegSize.height) {
2069 bYuv888OverrideJpeg = true;
2070 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2071 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 }
2073
2074 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2075 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2076 commonFeatureMask);
2077 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2078 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2079
2080 rc = validateStreamDimensions(streamList);
2081 if (rc == NO_ERROR) {
2082 rc = validateStreamRotations(streamList);
2083 }
2084 if (rc != NO_ERROR) {
2085 LOGE("Invalid stream configuration requested!");
2086 pthread_mutex_unlock(&mMutex);
2087 return rc;
2088 }
2089
Emilian Peev0f3c3162017-03-15 12:57:46 +00002090 if (1 < pdStatCount) {
2091 LOGE("HAL doesn't support multiple PD streams");
2092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095
2096 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2097 (1 == pdStatCount)) {
2098 LOGE("HAL doesn't support PD streams in HFR mode!");
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2104 for (size_t i = 0; i < streamList->num_streams; i++) {
2105 camera3_stream_t *newStream = streamList->streams[i];
2106 LOGH("newStream type = %d, stream format = %d "
2107 "stream size : %d x %d, stream rotation = %d",
2108 newStream->stream_type, newStream->format,
2109 newStream->width, newStream->height, newStream->rotation);
2110 //if the stream is in the mStreamList validate it
2111        //if the stream is already in mStreamInfo, validate it
2112 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2113 it != mStreamInfo.end(); it++) {
2114 if ((*it)->stream == newStream) {
2115 QCamera3ProcessingChannel *channel =
2116 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2117 stream_exists = true;
2118 if (channel)
2119 delete channel;
2120 (*it)->status = VALID;
2121 (*it)->stream->priv = NULL;
2122 (*it)->channel = NULL;
2123 }
2124 }
2125 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2126 //new stream
2127 stream_info_t* stream_info;
2128 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2129 if (!stream_info) {
2130 LOGE("Could not allocate stream info");
2131 rc = -ENOMEM;
2132 pthread_mutex_unlock(&mMutex);
2133 return rc;
2134 }
2135 stream_info->stream = newStream;
2136 stream_info->status = VALID;
2137 stream_info->channel = NULL;
2138 mStreamInfo.push_back(stream_info);
2139 }
2140 /* Covers Opaque ZSL and API1 F/W ZSL */
2141 if (IS_USAGE_ZSL(newStream->usage)
2142 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2143 if (zslStream != NULL) {
2144 LOGE("Multiple input/reprocess streams requested!");
2145 pthread_mutex_unlock(&mMutex);
2146 return BAD_VALUE;
2147 }
2148 zslStream = newStream;
2149 }
2150 /* Covers YUV reprocess */
2151 if (inputStream != NULL) {
2152 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2153 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->width == newStream->width
2156 && inputStream->height == newStream->height) {
2157 if (zslStream != NULL) {
2158                /* This scenario indicates that multiple YUV streams with the same
2159                 * size as the input stream have been requested. Since the zsl stream
2160                 * handle is solely used for overriding the size of streams that share
2161                 * h/w streams, we just make a guess here as to which of the streams is
2162                 * the ZSL stream. This will be refactored once we add generic logic
2163                 * for streams sharing encoder output.
2164                 */
2165 LOGH("Warning, Multiple ip/reprocess streams requested!");
2166 }
2167 zslStream = newStream;
2168 }
2169 }
2170 }
2171
2172 /* If a zsl stream is set, we know that we have configured at least one input or
2173 bidirectional stream */
2174 if (NULL != zslStream) {
2175 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2176 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2177 mInputStreamInfo.format = zslStream->format;
2178 mInputStreamInfo.usage = zslStream->usage;
2179 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2180 mInputStreamInfo.dim.width,
2181 mInputStreamInfo.dim.height,
2182 mInputStreamInfo.format, mInputStreamInfo.usage);
2183 }
2184
2185 cleanAndSortStreamInfo();
2186 if (mMetadataChannel) {
2187 delete mMetadataChannel;
2188 mMetadataChannel = NULL;
2189 }
2190 if (mSupportChannel) {
2191 delete mSupportChannel;
2192 mSupportChannel = NULL;
2193 }
2194
2195 if (mAnalysisChannel) {
2196 delete mAnalysisChannel;
2197 mAnalysisChannel = NULL;
2198 }
2199
2200 if (mDummyBatchChannel) {
2201 delete mDummyBatchChannel;
2202 mDummyBatchChannel = NULL;
2203 }
2204
Emilian Peev7650c122017-01-19 08:24:33 -08002205 if (mDepthChannel) {
2206 mDepthChannel = NULL;
2207 }
2208
Thierry Strudel2896d122017-02-23 19:18:03 -08002209 char is_type_value[PROPERTY_VALUE_MAX];
2210 property_get("persist.camera.is_type", is_type_value, "4");
2211 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2212
Thierry Strudel3d639192016-09-09 11:52:26 -07002213 //Create metadata channel and initialize it
2214 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2215 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2216 gCamCapability[mCameraId]->color_arrangement);
2217 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2218 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002219 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002220 if (mMetadataChannel == NULL) {
2221 LOGE("failed to allocate metadata channel");
2222 rc = -ENOMEM;
2223 pthread_mutex_unlock(&mMutex);
2224 return rc;
2225 }
2226 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2227 if (rc < 0) {
2228 LOGE("metadata channel initialization failed");
2229 delete mMetadataChannel;
2230 mMetadataChannel = NULL;
2231 pthread_mutex_unlock(&mMutex);
2232 return rc;
2233 }
2234
Thierry Strudel2896d122017-02-23 19:18:03 -08002235 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002236 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002237 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002238    // Keep track of preview/video stream indices.
2239 // There could be more than one preview streams, but only one video stream.
2240 int32_t video_stream_idx = -1;
2241 int32_t preview_stream_idx[streamList->num_streams];
2242 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002243 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2244 /* Allocate channel objects for the requested streams */
2245 for (size_t i = 0; i < streamList->num_streams; i++) {
2246 camera3_stream_t *newStream = streamList->streams[i];
2247 uint32_t stream_usage = newStream->usage;
2248 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2249 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2250 struct camera_info *p_info = NULL;
2251 pthread_mutex_lock(&gCamLock);
2252 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2253 pthread_mutex_unlock(&gCamLock);
2254 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2255 || IS_USAGE_ZSL(newStream->usage)) &&
2256 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002257 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002258 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002259 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2260 if (bUseCommonFeatureMask)
2261 zsl_ppmask = commonFeatureMask;
2262 else
2263 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002264 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002265 if (numStreamsOnEncoder > 0)
2266 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2267 else
2268 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002270 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002271 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002272 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002273 LOGH("Input stream configured, reprocess config");
2274 } else {
2275 //for non zsl streams find out the format
2276 switch (newStream->format) {
2277 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2278 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002279 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2281 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2282 /* add additional features to pp feature mask */
2283 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2284 mStreamConfigInfo.num_streams);
2285
2286 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2287 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2288 CAM_STREAM_TYPE_VIDEO;
2289 if (m_bTnrEnabled && m_bTnrVideo) {
2290 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2291 CAM_QCOM_FEATURE_CPP_TNR;
2292 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2294 ~CAM_QCOM_FEATURE_CDS;
2295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002296 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2297 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2298 CAM_QTI_FEATURE_PPEISCORE;
2299 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002300 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002301 } else {
2302 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2303 CAM_STREAM_TYPE_PREVIEW;
2304 if (m_bTnrEnabled && m_bTnrPreview) {
2305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2306 CAM_QCOM_FEATURE_CPP_TNR;
2307 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2308 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2309 ~CAM_QCOM_FEATURE_CDS;
2310 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002311 if(!m_bSwTnrPreview) {
2312 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2313 ~CAM_QTI_FEATURE_SW_TNR;
2314 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002315 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002316 padding_info.width_padding = mSurfaceStridePadding;
2317 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002318 previewSize.width = (int32_t)newStream->width;
2319 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002320 }
2321 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2322 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2323 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2324 newStream->height;
2325 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2326 newStream->width;
2327 }
2328 }
2329 break;
2330 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002331 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002332 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2333 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2334 if (bUseCommonFeatureMask)
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 commonFeatureMask;
2337 else
2338 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2339 CAM_QCOM_FEATURE_NONE;
2340 } else {
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2342 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2343 }
2344 break;
2345 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002347 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2348 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2349 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2351 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002352 /* Remove rotation if it is not supported
2353 for 4K LiveVideo snapshot case (online processing) */
2354 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2355 CAM_QCOM_FEATURE_ROTATION)) {
2356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2357 &= ~CAM_QCOM_FEATURE_ROTATION;
2358 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002359 } else {
2360 if (bUseCommonFeatureMask &&
2361 isOnEncoder(maxViewfinderSize, newStream->width,
2362 newStream->height)) {
2363 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2364 } else {
2365 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2366 }
2367 }
2368 if (isZsl) {
2369 if (zslStream) {
2370 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2371 (int32_t)zslStream->width;
2372 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2373 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2375 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002376 } else {
2377 LOGE("Error, No ZSL stream identified");
2378 pthread_mutex_unlock(&mMutex);
2379 return -EINVAL;
2380 }
2381 } else if (m_bIs4KVideo) {
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2383 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2384 } else if (bYuv888OverrideJpeg) {
2385 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2386 (int32_t)largeYuv888Size.width;
2387 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2388 (int32_t)largeYuv888Size.height;
2389 }
2390 break;
2391 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2392 case HAL_PIXEL_FORMAT_RAW16:
2393 case HAL_PIXEL_FORMAT_RAW10:
2394 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2395 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2396 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002397 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2398 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2399 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2400 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2401 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2402 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2403 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2404 gCamCapability[mCameraId]->dt[mPDIndex];
2405 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2406 gCamCapability[mCameraId]->vc[mPDIndex];
2407 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002408 break;
2409 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002410 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002411 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2412 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2413 break;
2414 }
2415 }
2416
2417 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2418 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2419 gCamCapability[mCameraId]->color_arrangement);
2420
2421 if (newStream->priv == NULL) {
2422 //New stream, construct channel
2423 switch (newStream->stream_type) {
2424 case CAMERA3_STREAM_INPUT:
2425 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2426 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2427 break;
2428 case CAMERA3_STREAM_BIDIRECTIONAL:
2429 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2430 GRALLOC_USAGE_HW_CAMERA_WRITE;
2431 break;
2432 case CAMERA3_STREAM_OUTPUT:
2433                /* For video encoding streams, set the read/write-rarely
2434                 * flags so that the buffers may be allocated un-cached */
2435 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2436 newStream->usage |=
2437 (GRALLOC_USAGE_SW_READ_RARELY |
2438 GRALLOC_USAGE_SW_WRITE_RARELY |
2439 GRALLOC_USAGE_HW_CAMERA_WRITE);
2440 else if (IS_USAGE_ZSL(newStream->usage))
2441 {
2442 LOGD("ZSL usage flag skipping");
2443 }
2444 else if (newStream == zslStream
2445 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2446 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2447 } else
2448 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2449 break;
2450 default:
2451 LOGE("Invalid stream_type %d", newStream->stream_type);
2452 break;
2453 }
2454
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002455 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002456 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2457 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2458 QCamera3ProcessingChannel *channel = NULL;
2459 switch (newStream->format) {
2460 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2461 if ((newStream->usage &
2462 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2463 (streamList->operation_mode ==
2464 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2465 ) {
2466 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2467 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002468 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002469 this,
2470 newStream,
2471 (cam_stream_type_t)
2472 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2473 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2474 mMetadataChannel,
2475 0); //heap buffers are not required for HFR video channel
2476 if (channel == NULL) {
2477 LOGE("allocation of channel failed");
2478 pthread_mutex_unlock(&mMutex);
2479 return -ENOMEM;
2480 }
2481 //channel->getNumBuffers() will return 0 here so use
2482                        //MAX_INFLIGHT_HFR_REQUESTS
2483 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2484 newStream->priv = channel;
2485 LOGI("num video buffers in HFR mode: %d",
2486 MAX_INFLIGHT_HFR_REQUESTS);
2487 } else {
2488 /* Copy stream contents in HFR preview only case to create
2489 * dummy batch channel so that sensor streaming is in
2490 * HFR mode */
2491 if (!m_bIsVideo && (streamList->operation_mode ==
2492 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2493 mDummyBatchStream = *newStream;
2494 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002495 int bufferCount = MAX_INFLIGHT_REQUESTS;
2496 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2497 CAM_STREAM_TYPE_VIDEO) {
2498 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2499 bufferCount = MAX_VIDEO_BUFFERS;
2500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002501 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2502 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002503 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002504 this,
2505 newStream,
2506 (cam_stream_type_t)
2507 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2508 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2509 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002510 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002511 if (channel == NULL) {
2512 LOGE("allocation of channel failed");
2513 pthread_mutex_unlock(&mMutex);
2514 return -ENOMEM;
2515 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 /* disable UBWC for preview, though supported,
2517 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002518 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002519                    (previewSize.width == (int32_t)videoWidth) &&
 2520                    (previewSize.height == (int32_t)videoHeight)) {
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002521 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002522 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002523 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002524 newStream->max_buffers = channel->getNumBuffers();
2525 newStream->priv = channel;
2526 }
2527 break;
2528 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2529 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2530 mChannelHandle,
2531 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002532 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002533 this,
2534 newStream,
2535 (cam_stream_type_t)
2536 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2538 mMetadataChannel);
2539 if (channel == NULL) {
2540 LOGE("allocation of YUV channel failed");
2541 pthread_mutex_unlock(&mMutex);
2542 return -ENOMEM;
2543 }
2544 newStream->max_buffers = channel->getNumBuffers();
2545 newStream->priv = channel;
2546 break;
2547 }
2548 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2549 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002550 case HAL_PIXEL_FORMAT_RAW10: {
2551 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2552 (HAL_DATASPACE_DEPTH != newStream->data_space))
2553 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002554 mRawChannel = new QCamera3RawChannel(
2555 mCameraHandle->camera_handle, mChannelHandle,
2556 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002557 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002558 this, newStream,
2559 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002561 if (mRawChannel == NULL) {
2562 LOGE("allocation of raw channel failed");
2563 pthread_mutex_unlock(&mMutex);
2564 return -ENOMEM;
2565 }
2566 newStream->max_buffers = mRawChannel->getNumBuffers();
2567 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2568 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002569 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002570 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002571 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2572 mDepthChannel = new QCamera3DepthChannel(
2573 mCameraHandle->camera_handle, mChannelHandle,
2574 mCameraHandle->ops, NULL, NULL, &padding_info,
2575 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2576 mMetadataChannel);
2577 if (NULL == mDepthChannel) {
2578 LOGE("Allocation of depth channel failed");
2579 pthread_mutex_unlock(&mMutex);
2580 return NO_MEMORY;
2581 }
2582 newStream->priv = mDepthChannel;
2583 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2584 } else {
2585 // Max live snapshot inflight buffer is 1. This is to mitigate
2586 // frame drop issues for video snapshot. The more buffers being
2587 // allocated, the more frame drops there are.
2588 mPictureChannel = new QCamera3PicChannel(
2589 mCameraHandle->camera_handle, mChannelHandle,
2590 mCameraHandle->ops, captureResultCb,
2591 setBufferErrorStatus, &padding_info, this, newStream,
2592 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2593 m_bIs4KVideo, isZsl, mMetadataChannel,
2594 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2595 if (mPictureChannel == NULL) {
2596 LOGE("allocation of channel failed");
2597 pthread_mutex_unlock(&mMutex);
2598 return -ENOMEM;
2599 }
2600 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2601 newStream->max_buffers = mPictureChannel->getNumBuffers();
2602 mPictureChannel->overrideYuvSize(
2603 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2604 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002606 break;
2607
2608 default:
2609 LOGE("not a supported format 0x%x", newStream->format);
2610 break;
2611 }
2612 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2613 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2614 } else {
2615 LOGE("Error, Unknown stream type");
2616 pthread_mutex_unlock(&mMutex);
2617 return -EINVAL;
2618 }
2619
2620 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002621 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2622 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002623 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002624 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002625 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2626 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2627 }
2628 }
2629
2630 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2631 it != mStreamInfo.end(); it++) {
2632 if ((*it)->stream == newStream) {
2633 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2634 break;
2635 }
2636 }
2637 } else {
2638 // Channel already exists for this stream
2639 // Do nothing for now
2640 }
2641 padding_info = gCamCapability[mCameraId]->padding_info;
2642
Emilian Peev7650c122017-01-19 08:24:33 -08002643        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002644         * since there is no real stream associated with them
2645 */
Emilian Peev7650c122017-01-19 08:24:33 -08002646 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002647 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2648 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002649 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002651 }
2652
Binhao Lincdb362a2017-04-20 13:31:54 -07002653 // By default, preview stream TNR is disabled.
2654 // Enable TNR to the preview stream if all conditions below are satisfied:
2655 // 1. resolution <= 1080p.
2656 // 2. preview resolution == video resolution.
2657 // 3. video stream TNR is enabled.
2658 // 4. EIS2.0
2659 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2660 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2661 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2662 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2663 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2664 video_stream->width == preview_stream->width &&
2665 video_stream->height == preview_stream->height) {
2666 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2667 CAM_QCOM_FEATURE_CPP_TNR;
2668 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2669 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2670 ~CAM_QCOM_FEATURE_CDS;
2671 }
2672 }
2673
Thierry Strudel2896d122017-02-23 19:18:03 -08002674 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2675 onlyRaw = false;
2676 }
2677
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002678 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002679 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002680 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002681 cam_analysis_info_t analysisInfo;
2682 int32_t ret = NO_ERROR;
2683 ret = mCommon.getAnalysisInfo(
2684 FALSE,
2685 analysisFeatureMask,
2686 &analysisInfo);
2687 if (ret == NO_ERROR) {
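            // Y-only analysis formats use the Y filter arrangement; otherwise
            // keep the sensor's native color arrangement when deciding PAAF
            // support for the analysis stream.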
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002688 cam_color_filter_arrangement_t analysis_color_arrangement =
2689 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2690 CAM_FILTER_ARRANGEMENT_Y :
2691 gCamCapability[mCameraId]->color_arrangement);
2692 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2693 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002694 cam_dimension_t analysisDim;
2695 analysisDim = mCommon.getMatchingDimension(previewSize,
2696 analysisInfo.analysis_recommended_res);
2697
2698 mAnalysisChannel = new QCamera3SupportChannel(
2699 mCameraHandle->camera_handle,
2700 mChannelHandle,
2701 mCameraHandle->ops,
2702 &analysisInfo.analysis_padding_info,
2703 analysisFeatureMask,
2704 CAM_STREAM_TYPE_ANALYSIS,
2705 &analysisDim,
2706 (analysisInfo.analysis_format
2707 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2708 : CAM_FORMAT_YUV_420_NV21),
2709 analysisInfo.hw_analysis_supported,
2710 gCamCapability[mCameraId]->color_arrangement,
2711 this,
2712 0); // force buffer count to 0
2713 } else {
2714 LOGW("getAnalysisInfo failed, ret = %d", ret);
2715 }
2716 if (!mAnalysisChannel) {
2717 LOGW("Analysis channel cannot be created");
2718 }
2719 }
2720
Thierry Strudel3d639192016-09-09 11:52:26 -07002721 //RAW DUMP channel
2722 if (mEnableRawDump && isRawStreamRequested == false){
2723 cam_dimension_t rawDumpSize;
2724 rawDumpSize = getMaxRawSize(mCameraId);
2725 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2726 setPAAFSupport(rawDumpFeatureMask,
2727 CAM_STREAM_TYPE_RAW,
2728 gCamCapability[mCameraId]->color_arrangement);
2729 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2730 mChannelHandle,
2731 mCameraHandle->ops,
2732 rawDumpSize,
2733 &padding_info,
2734 this, rawDumpFeatureMask);
2735 if (!mRawDumpChannel) {
2736 LOGE("Raw Dump channel cannot be created");
2737 pthread_mutex_unlock(&mMutex);
2738 return -ENOMEM;
2739 }
2740 }
2741
Thierry Strudel3d639192016-09-09 11:52:26 -07002742 if (mAnalysisChannel) {
2743 cam_analysis_info_t analysisInfo;
2744 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2745 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2746 CAM_STREAM_TYPE_ANALYSIS;
2747 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2748 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002749 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002750 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2751 &analysisInfo);
2752 if (rc != NO_ERROR) {
2753 LOGE("getAnalysisInfo failed, ret = %d", rc);
2754 pthread_mutex_unlock(&mMutex);
2755 return rc;
2756 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002757 cam_color_filter_arrangement_t analysis_color_arrangement =
2758 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2759 CAM_FILTER_ARRANGEMENT_Y :
2760 gCamCapability[mCameraId]->color_arrangement);
2761 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2762 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2763 analysis_color_arrangement);
2764
Thierry Strudel3d639192016-09-09 11:52:26 -07002765 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002766 mCommon.getMatchingDimension(previewSize,
2767 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002768 mStreamConfigInfo.num_streams++;
2769 }
2770
Thierry Strudel2896d122017-02-23 19:18:03 -08002771 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002772 cam_analysis_info_t supportInfo;
2773 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2774 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2775 setPAAFSupport(callbackFeatureMask,
2776 CAM_STREAM_TYPE_CALLBACK,
2777 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002778 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002779 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002780 if (ret != NO_ERROR) {
2781 /* Ignore the error for Mono camera
2782 * because the PAAF bit mask is only set
2783 * for CAM_STREAM_TYPE_ANALYSIS stream type
2784 */
2785 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2786 LOGW("getAnalysisInfo failed, ret = %d", ret);
2787 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002788 }
2789 mSupportChannel = new QCamera3SupportChannel(
2790 mCameraHandle->camera_handle,
2791 mChannelHandle,
2792 mCameraHandle->ops,
2793 &gCamCapability[mCameraId]->padding_info,
2794 callbackFeatureMask,
2795 CAM_STREAM_TYPE_CALLBACK,
2796 &QCamera3SupportChannel::kDim,
2797 CAM_FORMAT_YUV_420_NV21,
2798 supportInfo.hw_analysis_supported,
2799 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002800 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002801 if (!mSupportChannel) {
2802 LOGE("dummy channel cannot be created");
2803 pthread_mutex_unlock(&mMutex);
2804 return -ENOMEM;
2805 }
2806 }
2807
2808 if (mSupportChannel) {
2809 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2810 QCamera3SupportChannel::kDim;
2811 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2812 CAM_STREAM_TYPE_CALLBACK;
2813 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2814 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2815 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2816 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2817 gCamCapability[mCameraId]->color_arrangement);
2818 mStreamConfigInfo.num_streams++;
2819 }
2820
2821 if (mRawDumpChannel) {
2822 cam_dimension_t rawSize;
2823 rawSize = getMaxRawSize(mCameraId);
2824 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2825 rawSize;
2826 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2827 CAM_STREAM_TYPE_RAW;
2828 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2829 CAM_QCOM_FEATURE_NONE;
2830 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2831 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2832 gCamCapability[mCameraId]->color_arrangement);
2833 mStreamConfigInfo.num_streams++;
2834 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002835
2836 if (mHdrPlusRawSrcChannel) {
2837 cam_dimension_t rawSize;
2838 rawSize = getMaxRawSize(mCameraId);
2839 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2840 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2841 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2842 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2843 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2844 gCamCapability[mCameraId]->color_arrangement);
2845 mStreamConfigInfo.num_streams++;
2846 }
2847
Thierry Strudel3d639192016-09-09 11:52:26 -07002848 /* In HFR mode, if no video stream is added, create a dummy channel so that
2849 * the ISP can use batch mode even for the preview-only case. This channel is
2850 * never 'start'ed (no stream-on), it is only 'initialized' */
2851 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2852 !m_bIsVideo) {
2853 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2854 setPAAFSupport(dummyFeatureMask,
2855 CAM_STREAM_TYPE_VIDEO,
2856 gCamCapability[mCameraId]->color_arrangement);
2857 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2858 mChannelHandle,
2859 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002860 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002861 this,
2862 &mDummyBatchStream,
2863 CAM_STREAM_TYPE_VIDEO,
2864 dummyFeatureMask,
2865 mMetadataChannel);
2866 if (NULL == mDummyBatchChannel) {
2867 LOGE("creation of mDummyBatchChannel failed."
2868 "Preview will use non-hfr sensor mode ");
2869 }
2870 }
2871 if (mDummyBatchChannel) {
2872 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2873 mDummyBatchStream.width;
2874 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2875 mDummyBatchStream.height;
2876 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2877 CAM_STREAM_TYPE_VIDEO;
2878 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2879 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2880 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2881 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2882 gCamCapability[mCameraId]->color_arrangement);
2883 mStreamConfigInfo.num_streams++;
2884 }
2885
2886 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2887 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002888 m_bIs4KVideo ? 0 :
2889 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002890
2891 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2892 for (pendingRequestIterator i = mPendingRequestsList.begin();
2893 i != mPendingRequestsList.end();) {
2894 i = erasePendingRequest(i);
2895 }
2896 mPendingFrameDropList.clear();
2897 // Initialize/Reset the pending buffers list
2898 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2899 req.mPendingBufferList.clear();
2900 }
2901 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 mCurJpegMeta.clear();
2904 //Get min frame duration for this stream configuration
2905 deriveMinFrameDuration();
2906
Chien-Yu Chenee335912017-02-09 17:53:20 -08002907 mFirstPreviewIntentSeen = false;
2908
2909 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002910 {
2911 Mutex::Autolock l(gHdrPlusClientLock);
2912 disableHdrPlusModeLocked();
2913 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002914
Thierry Strudel3d639192016-09-09 11:52:26 -07002915 // Update state
2916 mState = CONFIGURED;
2917
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002918 mFirstMetadataCallback = true;
2919
Thierry Strudel3d639192016-09-09 11:52:26 -07002920 pthread_mutex_unlock(&mMutex);
2921
2922 return rc;
2923}
2924
2925/*===========================================================================
2926 * FUNCTION : validateCaptureRequest
2927 *
2928 * DESCRIPTION: validate a capture request from camera service
2929 *
2930 * PARAMETERS :
2931 * @request : request from framework to process
2932 *
2933 * RETURN :
2934 *
2935 *==========================================================================*/
2936int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002937 camera3_capture_request_t *request,
2938 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002939{
2940 ssize_t idx = 0;
2941 const camera3_stream_buffer_t *b;
2942 CameraMetadata meta;
2943
2944 /* Sanity check the request */
2945 if (request == NULL) {
2946 LOGE("NULL capture request");
2947 return BAD_VALUE;
2948 }
2949
2950 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2951 /*settings cannot be null for the first request*/
2952 return BAD_VALUE;
2953 }
2954
2955 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002956 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2957 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002958 LOGE("Request %d: No output buffers provided!",
2959 frameNumber);
2960 return BAD_VALUE;
2961 }
2962 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2963 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2964 request->num_output_buffers, MAX_NUM_STREAMS);
2965 return BAD_VALUE;
2966 }
2967 if (request->input_buffer != NULL) {
2968 b = request->input_buffer;
2969 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2970 LOGE("Request %d: Buffer %ld: Status not OK!",
2971 frameNumber, (long)idx);
2972 return BAD_VALUE;
2973 }
2974 if (b->release_fence != -1) {
2975 LOGE("Request %d: Buffer %ld: Has a release fence!",
2976 frameNumber, (long)idx);
2977 return BAD_VALUE;
2978 }
2979 if (b->buffer == NULL) {
2980 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2981 frameNumber, (long)idx);
2982 return BAD_VALUE;
2983 }
2984 }
2985
2986 // Validate all buffers
2987 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002988 if (b == NULL) {
2989 return BAD_VALUE;
2990 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002991 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002992 QCamera3ProcessingChannel *channel =
2993 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2994 if (channel == NULL) {
2995 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2996 frameNumber, (long)idx);
2997 return BAD_VALUE;
2998 }
2999 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3000 LOGE("Request %d: Buffer %ld: Status not OK!",
3001 frameNumber, (long)idx);
3002 return BAD_VALUE;
3003 }
3004 if (b->release_fence != -1) {
3005 LOGE("Request %d: Buffer %ld: Has a release fence!",
3006 frameNumber, (long)idx);
3007 return BAD_VALUE;
3008 }
3009 if (b->buffer == NULL) {
3010 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3011 frameNumber, (long)idx);
3012 return BAD_VALUE;
3013 }
3014 if (*(b->buffer) == NULL) {
3015 LOGE("Request %d: Buffer %ld: NULL private handle!",
3016 frameNumber, (long)idx);
3017 return BAD_VALUE;
3018 }
3019 idx++;
3020 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 return NO_ERROR;
3023}
3024
3025/*===========================================================================
3026 * FUNCTION : deriveMinFrameDuration
3027 *
3028 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3029 * on currently configured streams.
3030 *
3031 * PARAMETERS : NONE
3032 *
3033 * RETURN : NONE
3034 *
3035 *==========================================================================*/
3036void QCamera3HardwareInterface::deriveMinFrameDuration()
3037{
3038 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3039
3040 maxJpegDim = 0;
3041 maxProcessedDim = 0;
3042 maxRawDim = 0;
3043
3044 // Figure out maximum jpeg, processed, and raw dimensions
3045 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3046 it != mStreamInfo.end(); it++) {
3047
3048 // Input stream doesn't have valid stream_type
3049 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3050 continue;
3051
3052 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3053 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3054 if (dimension > maxJpegDim)
3055 maxJpegDim = dimension;
3056 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3057 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3058 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3059 if (dimension > maxRawDim)
3060 maxRawDim = dimension;
3061 } else {
3062 if (dimension > maxProcessedDim)
3063 maxProcessedDim = dimension;
3064 }
3065 }
3066
3067 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3068 MAX_SIZES_CNT);
3069
3070 //Assume all jpeg dimensions are in processed dimensions.
3071 if (maxJpegDim > maxProcessedDim)
3072 maxProcessedDim = maxJpegDim;
3073 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3074 if (maxProcessedDim > maxRawDim) {
3075 maxRawDim = INT32_MAX;
3076
3077 for (size_t i = 0; i < count; i++) {
3078 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3079 gCamCapability[mCameraId]->raw_dim[i].height;
3080 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3081 maxRawDim = dimension;
3082 }
3083 }
3084
3085 //Find minimum durations for processed, jpeg, and raw
3086 for (size_t i = 0; i < count; i++) {
3087 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3088 gCamCapability[mCameraId]->raw_dim[i].height) {
3089 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3090 break;
3091 }
3092 }
3093 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3094 for (size_t i = 0; i < count; i++) {
3095 if (maxProcessedDim ==
3096 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3097 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3098 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3099 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3100 break;
3101 }
3102 }
3103}
3104
3105/*===========================================================================
3106 * FUNCTION : getMinFrameDuration
3107 *
3108 * DESCRIPTION: get minimum frame duration based on the minimum frame durations derived
3109 * from the current stream configuration and the current request configuration.
3110 *
3111 * PARAMETERS : @request: request sent by the framework
3112 *
3113 * RETURN : min frame duration for a particular request
3114 *
3115 *==========================================================================*/
3116int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3117{
3118 bool hasJpegStream = false;
3119 bool hasRawStream = false;
3120 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3121 const camera3_stream_t *stream = request->output_buffers[i].stream;
3122 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3123 hasJpegStream = true;
3124 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3125 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3126 stream->format == HAL_PIXEL_FORMAT_RAW16)
3127 hasRawStream = true;
3128 }
3129
3130 if (!hasJpegStream)
3131 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3132 else
3133 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3134}
3135
3136/*===========================================================================
3137 * FUNCTION : handleBuffersDuringFlushLock
3138 *
3139 * DESCRIPTION: Account for buffers returned from back-end during flush
3140 * This function is executed while mMutex is held by the caller.
3141 *
3142 * PARAMETERS :
3143 * @buffer: image buffer for the callback
3144 *
3145 * RETURN :
3146 *==========================================================================*/
3147void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3148{
3149 bool buffer_found = false;
3150 for (List<PendingBuffersInRequest>::iterator req =
3151 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3152 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3153 for (List<PendingBufferInfo>::iterator i =
3154 req->mPendingBufferList.begin();
3155 i != req->mPendingBufferList.end(); i++) {
3156 if (i->buffer == buffer->buffer) {
3157 mPendingBuffersMap.numPendingBufsAtFlush--;
3158 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3159 buffer->buffer, req->frame_number,
3160 mPendingBuffersMap.numPendingBufsAtFlush);
3161 buffer_found = true;
3162 break;
3163 }
3164 }
3165 if (buffer_found) {
3166 break;
3167 }
3168 }
3169 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3170 //signal the flush()
3171 LOGD("All buffers returned to HAL. Continue flush");
3172 pthread_cond_signal(&mBuffersCond);
3173 }
3174}
3175
Thierry Strudel3d639192016-09-09 11:52:26 -07003176/*===========================================================================
3177 * FUNCTION : handleBatchMetadata
3178 *
3179 * DESCRIPTION: Handles metadata buffer callback in batch mode
3180 *
3181 * PARAMETERS : @metadata_buf: metadata buffer
3182 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3183 * the meta buf in this method
3184 *
3185 * RETURN :
3186 *
3187 *==========================================================================*/
3188void QCamera3HardwareInterface::handleBatchMetadata(
3189 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3190{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003191 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003192
3193 if (NULL == metadata_buf) {
3194 LOGE("metadata_buf is NULL");
3195 return;
3196 }
3197 /* In batch mode, the metadata will contain the frame number and timestamp of
3198 * the last frame in the batch. Eg: a batch containing buffers from requests
3199 * 5,6,7 and 8 will have the frame number and timestamp corresponding to 8.
3200 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3201 * multiple process_capture_results */
3202 metadata_buffer_t *metadata =
3203 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3204 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3205 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3206 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3207 uint32_t frame_number = 0, urgent_frame_number = 0;
3208 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3209 bool invalid_metadata = false;
3210 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3211 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003212 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003213
3214 int32_t *p_frame_number_valid =
3215 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3216 uint32_t *p_frame_number =
3217 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3218 int64_t *p_capture_time =
3219 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3220 int32_t *p_urgent_frame_number_valid =
3221 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3222 uint32_t *p_urgent_frame_number =
3223 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3224
3225 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3226 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3227 (NULL == p_urgent_frame_number)) {
3228 LOGE("Invalid metadata");
3229 invalid_metadata = true;
3230 } else {
3231 frame_number_valid = *p_frame_number_valid;
3232 last_frame_number = *p_frame_number;
3233 last_frame_capture_time = *p_capture_time;
3234 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3235 last_urgent_frame_number = *p_urgent_frame_number;
3236 }
3237
3238 /* In batch mode, when no video buffers are requested, set_parms are sent
3239 * for every capture_request. The difference between consecutive urgent
3240 * frame numbers and frame numbers should be used to interpolate the
3241 * corresponding frame numbers and time stamps */
3242 pthread_mutex_lock(&mMutex);
3243 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003244 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3245 if(idx < 0) {
3246 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3247 last_urgent_frame_number);
3248 mState = ERROR;
3249 pthread_mutex_unlock(&mMutex);
3250 return;
3251 }
3252 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003253 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3254 first_urgent_frame_number;
3255
3256 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3257 urgent_frame_number_valid,
3258 first_urgent_frame_number, last_urgent_frame_number);
3259 }
3260
3261 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003262 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3263 if(idx < 0) {
3264 LOGE("Invalid frame number received: %d. Irrecoverable error",
3265 last_frame_number);
3266 mState = ERROR;
3267 pthread_mutex_unlock(&mMutex);
3268 return;
3269 }
3270 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003271 frameNumDiff = last_frame_number + 1 -
3272 first_frame_number;
3273 mPendingBatchMap.removeItem(last_frame_number);
3274
3275 LOGD("frm: valid: %d frm_num: %d - %d",
3276 frame_number_valid,
3277 first_frame_number, last_frame_number);
3278
3279 }
3280 pthread_mutex_unlock(&mMutex);
3281
3282 if (urgent_frame_number_valid || frame_number_valid) {
3283 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3284 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3285 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3286 urgentFrameNumDiff, last_urgent_frame_number);
3287 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3288 LOGE("frameNumDiff: %d frameNum: %d",
3289 frameNumDiff, last_frame_number);
3290 }
3291
3292 for (size_t i = 0; i < loopCount; i++) {
3293 /* handleMetadataWithLock is called even for invalid_metadata for
3294 * pipeline depth calculation */
3295 if (!invalid_metadata) {
3296 /* Infer frame number. Batch metadata contains frame number of the
3297 * last frame */
3298 if (urgent_frame_number_valid) {
3299 if (i < urgentFrameNumDiff) {
3300 urgent_frame_number =
3301 first_urgent_frame_number + i;
3302 LOGD("inferred urgent frame_number: %d",
3303 urgent_frame_number);
3304 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3305 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3306 } else {
3307 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3308 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3309 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3310 }
3311 }
3312
3313 /* Infer frame number. Batch metadata contains frame number of the
3314 * last frame */
3315 if (frame_number_valid) {
3316 if (i < frameNumDiff) {
3317 frame_number = first_frame_number + i;
3318 LOGD("inferred frame_number: %d", frame_number);
3319 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3320 CAM_INTF_META_FRAME_NUMBER, frame_number);
3321 } else {
3322 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3323 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3324 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3325 }
3326 }
3327
3328 if (last_frame_capture_time) {
3329 //Infer timestamp
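                // The batch metadata carries the timestamp of the last frame in
                // the batch; walk back (loopCount - 1) frame intervals at the HFR
                // video frame rate to get the first frame's timestamp, then step
                // forward i intervals for the current frame.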
3330 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003331 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003332 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003333 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003334 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3335 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3336 LOGD("batch capture_time: %lld, capture_time: %lld",
3337 last_frame_capture_time, capture_time);
3338 }
3339 }
3340 pthread_mutex_lock(&mMutex);
3341 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003342 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003343 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3344 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003345 &is_metabuf_queued /* if metabuf isqueued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003346 pthread_mutex_unlock(&mMutex);
3347 }
3348
3349 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003350 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003351 mMetadataChannel->bufDone(metadata_buf);
3352 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003354 }
3355}
3356
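/*===========================================================================
 * FUNCTION   : notifyError
 *
 * DESCRIPTION: Notify the framework of an error for the given frame number
 *
 * PARAMETERS : @frameNumber: frame number of the request with the error
 *              @errorCode: error code to report
 *
 * RETURN     :
 *
 *==========================================================================*/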
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003357void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3358 camera3_error_msg_code_t errorCode)
3359{
3360 camera3_notify_msg_t notify_msg;
3361 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3362 notify_msg.type = CAMERA3_MSG_ERROR;
3363 notify_msg.message.error.error_code = errorCode;
3364 notify_msg.message.error.error_stream = NULL;
3365 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003366 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003367
3368 return;
3369}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003370
3371/*===========================================================================
3372 * FUNCTION : sendPartialMetadataWithLock
3373 *
3374 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3375 *
3376 * PARAMETERS : @metadata: metadata buffer
3377 * @requestIter: The iterator for the pending capture request for
3378 * which the partial result is being sent
3379 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3380 * last urgent metadata in a batch. Always true for non-batch mode
3381 *
3382 * RETURN :
3383 *
3384 *==========================================================================*/
3385
3386void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3387 metadata_buffer_t *metadata,
3388 const pendingRequestIterator requestIter,
3389 bool lastUrgentMetadataInBatch)
3390{
3391 camera3_capture_result_t result;
3392 memset(&result, 0, sizeof(camera3_capture_result_t));
3393
3394 requestIter->partial_result_cnt++;
3395
3396 // Extract 3A metadata
3397 result.result = translateCbUrgentMetadataToResultMetadata(
3398 metadata, lastUrgentMetadataInBatch);
3399 // Populate metadata result
3400 result.frame_number = requestIter->frame_number;
3401 result.num_output_buffers = 0;
3402 result.output_buffers = NULL;
3403 result.partial_result = requestIter->partial_result_cnt;
3404
3405 {
3406 Mutex::Autolock l(gHdrPlusClientLock);
3407 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3408 // Notify HDR+ client about the partial metadata.
3409 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3410 result.partial_result == PARTIAL_RESULT_COUNT);
3411 }
3412 }
3413
3414 orchestrateResult(&result);
3415 LOGD("urgent frame_number = %u", result.frame_number);
3416 free_camera_metadata((camera_metadata_t *)result.result);
3417}
3418
Thierry Strudel3d639192016-09-09 11:52:26 -07003419/*===========================================================================
3420 * FUNCTION : handleMetadataWithLock
3421 *
3422 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3423 *
3424 * PARAMETERS : @metadata_buf: metadata buffer
3425 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3426 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003427 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3428 * last urgent metadata in a batch. Always true for non-batch mode
3429 * @lastMetadataInBatch: Boolean to indicate whether this is the
3430 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003431 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3432 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003433 *
3434 * RETURN :
3435 *
3436 *==========================================================================*/
3437void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003438 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003439 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3440 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003441{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003442 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003443 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3444 //during flush do not send metadata from this thread
3445 LOGD("not sending metadata during flush or when mState is error");
3446 if (free_and_bufdone_meta_buf) {
3447 mMetadataChannel->bufDone(metadata_buf);
3448 free(metadata_buf);
3449 }
3450 return;
3451 }
3452
3453 //not in flush
3454 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3455 int32_t frame_number_valid, urgent_frame_number_valid;
3456 uint32_t frame_number, urgent_frame_number;
3457 int64_t capture_time;
3458 nsecs_t currentSysTime;
3459
3460 int32_t *p_frame_number_valid =
3461 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3462 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3463 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3464 int32_t *p_urgent_frame_number_valid =
3465 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3466 uint32_t *p_urgent_frame_number =
3467 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3468 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3469 metadata) {
3470 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3471 *p_frame_number_valid, *p_frame_number);
3472 }
3473
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003474 camera_metadata_t *resultMetadata = nullptr;
3475
Thierry Strudel3d639192016-09-09 11:52:26 -07003476 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3477 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3478 LOGE("Invalid metadata");
3479 if (free_and_bufdone_meta_buf) {
3480 mMetadataChannel->bufDone(metadata_buf);
3481 free(metadata_buf);
3482 }
3483 goto done_metadata;
3484 }
3485 frame_number_valid = *p_frame_number_valid;
3486 frame_number = *p_frame_number;
3487 capture_time = *p_capture_time;
3488 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3489 urgent_frame_number = *p_urgent_frame_number;
3490 currentSysTime = systemTime(CLOCK_MONOTONIC);
3491
3492 // Detect if buffers from any requests are overdue
3493 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003494 int64_t timeout;
3495 {
3496 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3497 // If there is a pending HDR+ request, the following requests may be blocked until the
3498 // HDR+ request is done. So allow a longer timeout.
3499 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3500 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3501 }
3502
3503 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003504 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003505 assert(missed.stream->priv);
3506 if (missed.stream->priv) {
3507 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3508 assert(ch->mStreams[0]);
3509 if (ch->mStreams[0]) {
3510 LOGE("Cancel missing frame = %d, buffer = %p,"
3511 "stream type = %d, stream format = %d",
3512 req.frame_number, missed.buffer,
3513 ch->mStreams[0]->getMyType(), missed.stream->format);
3514 ch->timeoutFrame(req.frame_number);
3515 }
3516 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518 }
3519 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003520 //For the very first metadata callback, regardless of whether it contains a valid
3521 //frame number, send the partial metadata for the jumpstarting requests.
3522 //Note that this has to be done even if the metadata doesn't contain a valid
3523 //urgent frame number, because in the case where only 1 request is ever submitted
3524 //to the HAL, there won't be a subsequent valid urgent frame number.
3525 if (mFirstMetadataCallback) {
3526 for (pendingRequestIterator i =
3527 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3528 if (i->bUseFirstPartial) {
3529 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3530 }
3531 }
3532 mFirstMetadataCallback = false;
3533 }
3534
Thierry Strudel3d639192016-09-09 11:52:26 -07003535 //Partial result on process_capture_result for timestamp
3536 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003537 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003538
3539 //Received an urgent frame number, handle it
3540 //using partial results
3541 for (pendingRequestIterator i =
3542 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3543 LOGD("Iterator Frame = %d urgent frame = %d",
3544 i->frame_number, urgent_frame_number);
3545
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003546 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003547 (i->partial_result_cnt == 0)) {
3548 LOGE("Error: HAL missed urgent metadata for frame number %d",
3549 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003550 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003551 }
3552
3553 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003554 i->partial_result_cnt == 0) {
3555 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003556 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3557 // Instant AEC settled for this frame.
3558 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3559 mInstantAECSettledFrameNumber = urgent_frame_number;
3560 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003561 break;
3562 }
3563 }
3564 }
3565
3566 if (!frame_number_valid) {
3567 LOGD("Not a valid normal frame number, used as SOF only");
3568 if (free_and_bufdone_meta_buf) {
3569 mMetadataChannel->bufDone(metadata_buf);
3570 free(metadata_buf);
3571 }
3572 goto done_metadata;
3573 }
3574 LOGH("valid frame_number = %u, capture_time = %lld",
3575 frame_number, capture_time);
3576
Emilian Peev7650c122017-01-19 08:24:33 -08003577 if (metadata->is_depth_data_valid) {
3578 handleDepthDataLocked(metadata->depth_data, frame_number);
3579 }
3580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581 // Check whether any stream buffer corresponding to this frame was dropped.
3582 // If dropped, send ERROR_BUFFER for the corresponding stream.
3583 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3584 for (auto & pendingRequest : mPendingRequestsList) {
3585 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3586 mInstantAECSettledFrameNumber)) {
3587 camera3_notify_msg_t notify_msg = {};
3588 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003589 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003590 QCamera3ProcessingChannel *channel =
3591 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003592 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003593 if (p_cam_frame_drop) {
3594 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003595 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003596 // Got the stream ID for drop frame.
3597 dropFrame = true;
3598 break;
3599 }
3600 }
3601 } else {
3602 // This is instant AEC case.
3603 // For instant AEC, drop the stream until AEC is settled.
3604 dropFrame = true;
3605 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003606
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003607 if (dropFrame) {
3608 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3609 if (p_cam_frame_drop) {
3610 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003611 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003612 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003613 } else {
3614 // For instant AEC, inform frame drop and frame number
3615 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3616 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 pendingRequest.frame_number, streamID,
3618 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003619 }
3620 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003622 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003623 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003624 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003625 if (p_cam_frame_drop) {
3626 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003627 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003628 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003629 } else {
3630 // For instant AEC, inform frame drop and frame number
3631 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3632 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003633 pendingRequest.frame_number, streamID,
3634 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003635 }
3636 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003638 PendingFrameDrop.stream_ID = streamID;
3639 // Add the Frame drop info to mPendingFrameDropList
3640 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003642 }
3643 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003645
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003646 for (auto & pendingRequest : mPendingRequestsList) {
3647 // Find the pending request with the frame number.
3648 if (pendingRequest.frame_number == frame_number) {
3649 // Update the sensor timestamp.
3650 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003651
Thierry Strudel3d639192016-09-09 11:52:26 -07003652
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003653 /* Set the timestamp in display metadata so that clients aware of
3654 private_handle such as VT can use these unmodified timestamps.
3655 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003656 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003657
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 // Find channel requiring metadata, meaning internal offline postprocess
3659 // is needed.
3660 //TODO: for now, we don't support two streams requiring metadata at the same time.
3661 // (because we are not making copies, and the metadata buffer is not reference counted.)
3662 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3664 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003665 if (iter->need_metadata) {
3666 internalPproc = true;
3667 QCamera3ProcessingChannel *channel =
3668 (QCamera3ProcessingChannel *)iter->stream->priv;
3669 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003670 if(p_is_metabuf_queued != NULL) {
3671 *p_is_metabuf_queued = true;
3672 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003673 break;
3674 }
3675 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003676 for (auto itr = pendingRequest.internalRequestList.begin();
3677 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003678 if (itr->need_metadata) {
3679 internalPproc = true;
3680 QCamera3ProcessingChannel *channel =
3681 (QCamera3ProcessingChannel *)itr->stream->priv;
3682 channel->queueReprocMetadata(metadata_buf);
3683 break;
3684 }
3685 }
3686
Thierry Strudel54dc9782017-02-15 12:12:10 -08003687 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003688
3689 bool *enableZsl = nullptr;
3690 if (gExposeEnableZslKey) {
3691 enableZsl = &pendingRequest.enableZsl;
3692 }
3693
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003694 resultMetadata = translateFromHalMetadata(metadata,
3695 pendingRequest.timestamp, pendingRequest.request_id,
3696 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3697 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003698 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003699 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003701 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003702 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003703 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003704
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003705 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003706
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003707 if (pendingRequest.blob_request) {
3708 //Dump tuning metadata if enabled and available
3709 char prop[PROPERTY_VALUE_MAX];
3710 memset(prop, 0, sizeof(prop));
3711 property_get("persist.camera.dumpmetadata", prop, "0");
3712 int32_t enabled = atoi(prop);
3713 if (enabled && metadata->is_tuning_params_valid) {
3714 dumpMetadataToFile(metadata->tuning_params,
3715 mMetaFrameCount,
3716 enabled,
3717 "Snapshot",
3718 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003719 }
3720 }
3721
3722 if (!internalPproc) {
3723 LOGD("couldn't find need_metadata for this metadata");
3724 // Return metadata buffer
3725 if (free_and_bufdone_meta_buf) {
3726 mMetadataChannel->bufDone(metadata_buf);
3727 free(metadata_buf);
3728 }
3729 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003732 }
3733 }
3734
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003735 // Try to send out shutter callbacks and capture results.
3736 handlePendingResultsWithLock(frame_number, resultMetadata);
3737 return;
3738
Thierry Strudel3d639192016-09-09 11:52:26 -07003739done_metadata:
3740 for (pendingRequestIterator i = mPendingRequestsList.begin();
3741 i != mPendingRequestsList.end() ;i++) {
3742 i->pipeline_depth++;
3743 }
3744 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3745 unblockRequestIfNecessary();
3746}
3747
3748/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003749 * FUNCTION : handleDepthDataLocked
3750 *
3751 * DESCRIPTION: Handles incoming depth data
3752 *
3753 * PARAMETERS : @depthData : Depth data
3754 * @frameNumber: Frame number of the incoming depth data
3755 *
3756 * RETURN :
3757 *
3758 *==========================================================================*/
3759void QCamera3HardwareInterface::handleDepthDataLocked(
3760 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3761 uint32_t currentFrameNumber;
3762 buffer_handle_t *depthBuffer;
3763
3764 if (nullptr == mDepthChannel) {
3765 LOGE("Depth channel not present!");
3766 return;
3767 }
3768
3769 camera3_stream_buffer_t resultBuffer =
3770 {.acquire_fence = -1,
3771 .release_fence = -1,
3772 .status = CAMERA3_BUFFER_STATUS_OK,
3773 .buffer = nullptr,
3774 .stream = mDepthChannel->getStream()};
3775 camera3_capture_result_t result =
3776 {.result = nullptr,
3777 .num_output_buffers = 1,
3778 .output_buffers = &resultBuffer,
3779 .partial_result = 0,
3780 .frame_number = 0};
3781
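    // Walk the depth channel's pending buffers in frame-number order: buffers
    // older than the incoming frame are returned with an error notification,
    // the matching frame is populated with the new depth data, and any newer
    // frames are left pending.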
3782 do {
3783 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3784 if (nullptr == depthBuffer) {
3785 break;
3786 }
3787
3788 result.frame_number = currentFrameNumber;
3789 resultBuffer.buffer = depthBuffer;
3790 if (currentFrameNumber == frameNumber) {
3791 int32_t rc = mDepthChannel->populateDepthData(depthData,
3792 frameNumber);
3793 if (NO_ERROR != rc) {
3794 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3795 } else {
3796 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3797 }
3798 } else if (currentFrameNumber > frameNumber) {
3799 break;
3800 } else {
3801 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3802 {{currentFrameNumber, mDepthChannel->getStream(),
3803 CAMERA3_MSG_ERROR_BUFFER}}};
3804 orchestrateNotify(&notify_msg);
3805
3806 LOGE("Depth buffer for frame number: %d is missing "
3807 "returning back!", currentFrameNumber);
3808 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3809 }
3810 mDepthChannel->unmapBuffer(currentFrameNumber);
3811
3812 orchestrateResult(&result);
3813 } while (currentFrameNumber < frameNumber);
3814}
3815
3816/*===========================================================================
3817 * FUNCTION : notifyErrorFoPendingDepthData
3818 *
3819 * DESCRIPTION: Returns error for any pending depth buffers
3820 *
3821 * PARAMETERS : depthCh - depth channel that needs to get flushed
3822 *
3823 * RETURN :
3824 *
3825 *==========================================================================*/
3826void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3827 QCamera3DepthChannel *depthCh) {
3828 uint32_t currentFrameNumber;
3829 buffer_handle_t *depthBuffer;
3830
3831 if (nullptr == depthCh) {
3832 return;
3833 }
3834
3835 camera3_notify_msg_t notify_msg =
3836 {.type = CAMERA3_MSG_ERROR,
3837 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3838 camera3_stream_buffer_t resultBuffer =
3839 {.acquire_fence = -1,
3840 .release_fence = -1,
3841 .buffer = nullptr,
3842 .stream = depthCh->getStream(),
3843 .status = CAMERA3_BUFFER_STATUS_ERROR};
3844 camera3_capture_result_t result =
3845 {.result = nullptr,
3846 .frame_number = 0,
3847 .num_output_buffers = 1,
3848 .partial_result = 0,
3849 .output_buffers = &resultBuffer};
3850
3851 while (nullptr !=
3852 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3853 depthCh->unmapBuffer(currentFrameNumber);
3854
3855 notify_msg.message.error.frame_number = currentFrameNumber;
3856 orchestrateNotify(&notify_msg);
3857
3858 resultBuffer.buffer = depthBuffer;
3859 result.frame_number = currentFrameNumber;
3860 orchestrateResult(&result);
3861 };
3862}
3863
3864/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003865 * FUNCTION : hdrPlusPerfLock
3866 *
3867 * DESCRIPTION: perf lock for HDR+ using custom intent
3868 *
3869 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3870 *
3871 * RETURN : None
3872 *
3873 *==========================================================================*/
3874void QCamera3HardwareInterface::hdrPlusPerfLock(
3875 mm_camera_super_buf_t *metadata_buf)
3876{
3877 if (NULL == metadata_buf) {
3878 LOGE("metadata_buf is NULL");
3879 return;
3880 }
3881 metadata_buffer_t *metadata =
3882 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3883 int32_t *p_frame_number_valid =
3884 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3885 uint32_t *p_frame_number =
3886 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3887
3888 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3889 LOGE("%s: Invalid metadata", __func__);
3890 return;
3891 }
3892
3893 //acquire perf lock for 5 sec after the last HDR frame is captured
3894 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3895 if ((p_frame_number != NULL) &&
3896 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003897 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003898 }
3899 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003900}
3901
3902/*===========================================================================
3903 * FUNCTION : handleInputBufferWithLock
3904 *
3905 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3906 *
3907 * PARAMETERS : @frame_number: frame number of the input buffer
3908 *
3909 * RETURN :
3910 *
3911 *==========================================================================*/
3912void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3913{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003914 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 pendingRequestIterator i = mPendingRequestsList.begin();
3916 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3917 i++;
3918 }
3919 if (i != mPendingRequestsList.end() && i->input_buffer) {
3920 //found the right request
3921 if (!i->shutter_notified) {
3922 CameraMetadata settings;
3923 camera3_notify_msg_t notify_msg;
3924 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3925 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3926 if(i->settings) {
3927 settings = i->settings;
3928 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3929 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3930 } else {
3931 LOGE("No timestamp in input settings! Using current one.");
3932 }
3933 } else {
3934 LOGE("Input settings missing!");
3935 }
3936
3937 notify_msg.type = CAMERA3_MSG_SHUTTER;
3938 notify_msg.message.shutter.frame_number = frame_number;
3939 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003940 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003941 i->shutter_notified = true;
3942 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3943 i->frame_number, notify_msg.message.shutter.timestamp);
3944 }
3945
3946 if (i->input_buffer->release_fence != -1) {
3947 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3948 close(i->input_buffer->release_fence);
3949 if (rc != OK) {
3950 LOGE("input buffer sync wait failed %d", rc);
3951 }
3952 }
3953
3954 camera3_capture_result result;
3955 memset(&result, 0, sizeof(camera3_capture_result));
3956 result.frame_number = frame_number;
3957 result.result = i->settings;
3958 result.input_buffer = i->input_buffer;
3959 result.partial_result = PARTIAL_RESULT_COUNT;
3960
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003961 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003962 LOGD("Input request metadata and input buffer frame_number = %u",
3963 i->frame_number);
3964 i = erasePendingRequest(i);
3965 } else {
3966 LOGE("Could not find input request for frame number %d", frame_number);
3967 }
3968}
3969
3970/*===========================================================================
3971 * FUNCTION : handleBufferWithLock
3972 *
3973 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3974 *
3975 * PARAMETERS : @buffer: image buffer for the callback
3976 * @frame_number: frame number of the image buffer
3977 *
3978 * RETURN :
3979 *
3980 *==========================================================================*/
3981void QCamera3HardwareInterface::handleBufferWithLock(
3982 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3983{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003984 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003985
3986 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3987 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3988 }
3989
Thierry Strudel3d639192016-09-09 11:52:26 -07003990 /* Nothing to be done during error state */
3991 if ((ERROR == mState) || (DEINIT == mState)) {
3992 return;
3993 }
3994 if (mFlushPerf) {
3995 handleBuffersDuringFlushLock(buffer);
3996 return;
3997 }
3998 //not in flush
3999 // If the frame number doesn't exist in the pending request list,
4000 // directly send the buffer to the frameworks, and update pending buffers map
4001 // Otherwise, book-keep the buffer.
4002 pendingRequestIterator i = mPendingRequestsList.begin();
4003 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4004 i++;
4005 }
4006 if (i == mPendingRequestsList.end()) {
4007 // Verify all pending requests frame_numbers are greater
4008 for (pendingRequestIterator j = mPendingRequestsList.begin();
4009 j != mPendingRequestsList.end(); j++) {
4010 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4011 LOGW("Error: pending live frame number %d is smaller than %d",
4012 j->frame_number, frame_number);
4013 }
4014 }
4015 camera3_capture_result_t result;
4016 memset(&result, 0, sizeof(camera3_capture_result_t));
4017 result.result = NULL;
4018 result.frame_number = frame_number;
4019 result.num_output_buffers = 1;
4020 result.partial_result = 0;
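        // If this (stream, frame number) pair is in the pending frame drop list,
        // mark the buffer with STATUS_ERROR so the framework knows its contents
        // are invalid.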
4021 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4022 m != mPendingFrameDropList.end(); m++) {
4023 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4024 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4025 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4026 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4027 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4028 frame_number, streamID);
4029 m = mPendingFrameDropList.erase(m);
4030 break;
4031 }
4032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004033 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004034 result.output_buffers = buffer;
4035 LOGH("result frame_number = %d, buffer = %p",
4036 frame_number, buffer->buffer);
4037
4038 mPendingBuffersMap.removeBuf(buffer->buffer);
4039
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004040 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004041 } else {
4042 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004043 if (i->input_buffer->release_fence != -1) {
4044 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4045 close(i->input_buffer->release_fence);
4046 if (rc != OK) {
4047 LOGE("input buffer sync wait failed %d", rc);
4048 }
4049 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004050 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004051
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004052 // Put buffer into the pending request
4053 for (auto &requestedBuffer : i->buffers) {
4054 if (requestedBuffer.stream == buffer->stream) {
4055 if (requestedBuffer.buffer != nullptr) {
4056 LOGE("Error: buffer is already set");
4057 } else {
4058 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4059 sizeof(camera3_stream_buffer_t));
4060 *(requestedBuffer.buffer) = *buffer;
4061 LOGH("cache buffer %p at result frame_number %u",
4062 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004063 }
4064 }
4065 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004066
4067 if (i->input_buffer) {
4068 // For a reprocessing request, try to send out shutter callback and result metadata.
4069 handlePendingResultsWithLock(frame_number, nullptr);
4070 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004072
4073 if (mPreviewStarted == false) {
4074 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4075 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004076 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4077
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004078 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4079 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4080 mPreviewStarted = true;
4081
4082 // Set power hint for preview
4083 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4084 }
4085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004086}
4087
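/*===========================================================================
 * FUNCTION : handlePendingResultsWithLock
 *
 * DESCRIPTION: Updates the pending request matching frameNumber with the given
 *              result metadata, then sends out shutter callbacks and capture
 *              results for all pending requests that are ready, in frame number
 *              order. Must be called with mMutex held.
 *
 * PARAMETERS : @frameNumber: frame number of the request this result belongs to
 *              @resultMetadata: result metadata for the request; nullptr for a
 *              reprocessing request
 *
 * RETURN :
 *
 *==========================================================================*/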
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004088void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4089 const camera_metadata_t *resultMetadata)
4090{
4091 // Find the pending request for this result metadata.
4092 auto requestIter = mPendingRequestsList.begin();
4093 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4094 requestIter++;
4095 }
4096
4097 if (requestIter == mPendingRequestsList.end()) {
4098 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4099 return;
4100 }
4101
4102 // Update the result metadata
4103 requestIter->resultMetadata = resultMetadata;
4104
4105 // Check what type of request this is.
4106 bool liveRequest = false;
4107 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004108 // HDR+ request doesn't have partial results.
4109 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004110 } else if (requestIter->input_buffer != nullptr) {
4111 // Reprocessing request result is the same as settings.
4112 requestIter->resultMetadata = requestIter->settings;
4113 // Reprocessing request doesn't have partial results.
4114 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4115 } else {
4116 liveRequest = true;
4117 requestIter->partial_result_cnt++;
4118 mPendingLiveRequest--;
4119
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004120 {
4121 Mutex::Autolock l(gHdrPlusClientLock);
4122 // For a live request, send the metadata to HDR+ client.
4123 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4124 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4125 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4126 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004127 }
4128 }
4129
4130 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4131 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4132 bool readyToSend = true;
4133
4134 // Iterate through the pending requests to send out shutter callbacks and results that are
4135 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4136 // live requests that don't have result metadata yet.
4137 auto iter = mPendingRequestsList.begin();
4138 while (iter != mPendingRequestsList.end()) {
4139 // Check if current pending request is ready. If it's not ready, the following pending
4140 // requests are also not ready.
4141 if (readyToSend && iter->resultMetadata == nullptr) {
4142 readyToSend = false;
4143 }
4144
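        // A request is "live" if it is neither an HDR+ request nor a reprocessing request.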
4145 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4146
4147 std::vector<camera3_stream_buffer_t> outputBuffers;
4148
4149 camera3_capture_result_t result = {};
4150 result.frame_number = iter->frame_number;
4151 result.result = iter->resultMetadata;
4152 result.partial_result = iter->partial_result_cnt;
4153
4154 // If this pending buffer has result metadata, we may be able to send out shutter callback
4155 // and result metadata.
4156 if (iter->resultMetadata != nullptr) {
4157 if (!readyToSend) {
4158 // If any of the previous pending request is not ready, this pending request is
4159 // also not ready to send in order to keep shutter callbacks and result metadata
4160 // in order.
4161 iter++;
4162 continue;
4163 }
4164
4165 // Invoke shutter callback if not yet.
4166 if (!iter->shutter_notified) {
4167 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4168
4169 // Find the timestamp in HDR+ result metadata
4170 camera_metadata_ro_entry_t entry;
4171 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4172 ANDROID_SENSOR_TIMESTAMP, &entry);
4173 if (res != OK) {
4174 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4175 __FUNCTION__, iter->frame_number, strerror(-res), res);
4176 } else {
4177 timestamp = entry.data.i64[0];
4178 }
4179
4180 camera3_notify_msg_t notify_msg = {};
4181 notify_msg.type = CAMERA3_MSG_SHUTTER;
4182 notify_msg.message.shutter.frame_number = iter->frame_number;
4183 notify_msg.message.shutter.timestamp = timestamp;
4184 orchestrateNotify(&notify_msg);
4185 iter->shutter_notified = true;
4186 }
4187
4188 result.input_buffer = iter->input_buffer;
4189
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004190 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4191 // If the result metadata belongs to a live request, notify errors for previous pending
4192 // live requests.
4193 mPendingLiveRequest--;
4194
4195 CameraMetadata dummyMetadata;
4196 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4197 result.result = dummyMetadata.release();
4198
4199 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004200
4201            // partial_result should be PARTIAL_RESULT_COUNT in case of
4202 // ERROR_RESULT.
4203 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4204 result.partial_result = PARTIAL_RESULT_COUNT;
4205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004206 } else {
4207 iter++;
4208 continue;
4209 }
4210
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004211 // Prepare output buffer array
4212 for (auto bufferInfoIter = iter->buffers.begin();
4213 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4214 if (bufferInfoIter->buffer != nullptr) {
4215
4216 QCamera3Channel *channel =
4217 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4218 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4219
4220 // Check if this buffer is a dropped frame.
4221 auto frameDropIter = mPendingFrameDropList.begin();
4222 while (frameDropIter != mPendingFrameDropList.end()) {
4223 if((frameDropIter->stream_ID == streamID) &&
4224 (frameDropIter->frame_number == frameNumber)) {
4225 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4226 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4227 streamID);
4228 mPendingFrameDropList.erase(frameDropIter);
4229 break;
4230 } else {
4231 frameDropIter++;
4232 }
4233 }
4234
4235 // Check buffer error status
4236 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4237 bufferInfoIter->buffer->buffer);
4238 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4239
4240 outputBuffers.push_back(*(bufferInfoIter->buffer));
4241 free(bufferInfoIter->buffer);
4242 bufferInfoIter->buffer = NULL;
4243 }
4244 }
4245
4246 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4247 result.num_output_buffers = outputBuffers.size();
4248
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004249 orchestrateResult(&result);
4250
4251 // For reprocessing, result metadata is the same as settings so do not free it here to
4252 // avoid double free.
4253 if (result.result != iter->settings) {
4254 free_camera_metadata((camera_metadata_t *)result.result);
4255 }
4256 iter->resultMetadata = nullptr;
4257 iter = erasePendingRequest(iter);
4258 }
4259
4260 if (liveRequest) {
4261 for (auto &iter : mPendingRequestsList) {
4262 // Increment pipeline depth for the following pending requests.
4263 if (iter.frame_number > frameNumber) {
4264 iter.pipeline_depth++;
4265 }
4266 }
4267 }
4268
4269 unblockRequestIfNecessary();
4270}
4271
Thierry Strudel3d639192016-09-09 11:52:26 -07004272/*===========================================================================
4273 * FUNCTION : unblockRequestIfNecessary
4274 *
4275 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4276 * that mMutex is held when this function is called.
4277 *
4278 * PARAMETERS :
4279 *
4280 * RETURN :
4281 *
4282 *==========================================================================*/
4283void QCamera3HardwareInterface::unblockRequestIfNecessary()
4284{
4285 // Unblock process_capture_request
4286 pthread_cond_signal(&mRequestCond);
4287}
4288
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004289/*===========================================================================
4290 * FUNCTION : isHdrSnapshotRequest
4291 *
4292 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4293 *
4294 * PARAMETERS : camera3 request structure
4295 *
4296 * RETURN : true if the request is an HDR snapshot request, false otherwise
4297 *
4298 *==========================================================================*/
4299bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4300{
4301 if (request == NULL) {
4302 LOGE("Invalid request handle");
4303 assert(0);
4304 return false;
4305 }
4306
4307 if (!mForceHdrSnapshot) {
4308 CameraMetadata frame_settings;
4309 frame_settings = request->settings;
4310
4311 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4312 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4313 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4314 return false;
4315 }
4316 } else {
4317 return false;
4318 }
4319
4320 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4321 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4322 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4323 return false;
4324 }
4325 } else {
4326 return false;
4327 }
4328 }
4329
4330 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4331 if (request->output_buffers[i].stream->format
4332 == HAL_PIXEL_FORMAT_BLOB) {
4333 return true;
4334 }
4335 }
4336
4337 return false;
4338}
4339/*===========================================================================
4340 * FUNCTION : orchestrateRequest
4341 *
4342 * DESCRIPTION: Orchestrates a capture request from camera service
4343 *
4344 * PARAMETERS :
4345 * @request : request from framework to process
4346 *
4347 * RETURN : Error status codes
4348 *
4349 *==========================================================================*/
4350int32_t QCamera3HardwareInterface::orchestrateRequest(
4351 camera3_capture_request_t *request)
4352{
4353
4354 uint32_t originalFrameNumber = request->frame_number;
4355 uint32_t originalOutputCount = request->num_output_buffers;
4356 const camera_metadata_t *original_settings = request->settings;
4357 List<InternalRequest> internallyRequestedStreams;
4358 List<InternalRequest> emptyInternalList;
4359
4360 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4361 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
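        /* Expand the single framework HDR snapshot request into a bracketed sequence
         * of internal requests at different exposure compensations with AE locked.
         * Internal frame numbers are generated for the extra captures and mapped back
         * to the framework frame number through _orchestrationDb. */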
4362 uint32_t internalFrameNumber;
4363 CameraMetadata modified_meta;
4364
4365
4366 /* Add Blob channel to list of internally requested streams */
4367 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4368 if (request->output_buffers[i].stream->format
4369 == HAL_PIXEL_FORMAT_BLOB) {
4370 InternalRequest streamRequested;
4371 streamRequested.meteringOnly = 1;
4372 streamRequested.need_metadata = 0;
4373 streamRequested.stream = request->output_buffers[i].stream;
4374 internallyRequestedStreams.push_back(streamRequested);
4375 }
4376 }
4377 request->num_output_buffers = 0;
4378 auto itr = internallyRequestedStreams.begin();
4379
4380 /* Modify setting to set compensation */
4381 modified_meta = request->settings;
4382 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4383 uint8_t aeLock = 1;
4384 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4385 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4386 camera_metadata_t *modified_settings = modified_meta.release();
4387 request->settings = modified_settings;
4388
4389 /* Capture Settling & -2x frame */
4390 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4391 request->frame_number = internalFrameNumber;
4392 processCaptureRequest(request, internallyRequestedStreams);
4393
4394 request->num_output_buffers = originalOutputCount;
4395 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4396 request->frame_number = internalFrameNumber;
4397 processCaptureRequest(request, emptyInternalList);
4398 request->num_output_buffers = 0;
4399
4400 modified_meta = modified_settings;
4401 expCompensation = 0;
4402 aeLock = 1;
4403 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4404 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4405 modified_settings = modified_meta.release();
4406 request->settings = modified_settings;
4407
4408 /* Capture Settling & 0X frame */
4409
4410 itr = internallyRequestedStreams.begin();
4411 if (itr == internallyRequestedStreams.end()) {
4412 LOGE("Error Internally Requested Stream list is empty");
4413 assert(0);
4414 } else {
4415 itr->need_metadata = 0;
4416 itr->meteringOnly = 1;
4417 }
4418
4419 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4420 request->frame_number = internalFrameNumber;
4421 processCaptureRequest(request, internallyRequestedStreams);
4422
4423 itr = internallyRequestedStreams.begin();
4424 if (itr == internallyRequestedStreams.end()) {
4425 ALOGE("Error Internally Requested Stream list is empty");
4426 assert(0);
4427 } else {
4428 itr->need_metadata = 1;
4429 itr->meteringOnly = 0;
4430 }
4431
4432 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4433 request->frame_number = internalFrameNumber;
4434 processCaptureRequest(request, internallyRequestedStreams);
4435
4436 /* Capture 2X frame*/
4437 modified_meta = modified_settings;
4438 expCompensation = GB_HDR_2X_STEP_EV;
4439 aeLock = 1;
4440 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4441 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4442 modified_settings = modified_meta.release();
4443 request->settings = modified_settings;
4444
4445 itr = internallyRequestedStreams.begin();
4446 if (itr == internallyRequestedStreams.end()) {
4447 ALOGE("Error Internally Requested Stream list is empty");
4448 assert(0);
4449 } else {
4450 itr->need_metadata = 0;
4451 itr->meteringOnly = 1;
4452 }
4453 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4454 request->frame_number = internalFrameNumber;
4455 processCaptureRequest(request, internallyRequestedStreams);
4456
4457 itr = internallyRequestedStreams.begin();
4458 if (itr == internallyRequestedStreams.end()) {
4459 ALOGE("Error Internally Requested Stream list is empty");
4460 assert(0);
4461 } else {
4462 itr->need_metadata = 1;
4463 itr->meteringOnly = 0;
4464 }
4465
4466 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4467 request->frame_number = internalFrameNumber;
4468 processCaptureRequest(request, internallyRequestedStreams);
4469
4470
4471 /* Capture 2X on original streaming config*/
4472 internallyRequestedStreams.clear();
4473
4474 /* Restore original settings pointer */
4475 request->settings = original_settings;
4476 } else {
4477 uint32_t internalFrameNumber;
4478 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4479 request->frame_number = internalFrameNumber;
4480 return processCaptureRequest(request, internallyRequestedStreams);
4481 }
4482
4483 return NO_ERROR;
4484}
4485
4486/*===========================================================================
4487 * FUNCTION : orchestrateResult
4488 *
4489 * DESCRIPTION: Translates the internal frame number and forwards a capture result to camera service
4490 *
4491 * PARAMETERS :
4492 * @result : capture result to be sent to camera service
4493 *
4494 * RETURN :
4495 *
4496 *==========================================================================*/
4497void QCamera3HardwareInterface::orchestrateResult(
4498 camera3_capture_result_t *result)
4499{
4500 uint32_t frameworkFrameNumber;
4501 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4502 frameworkFrameNumber);
4503 if (rc != NO_ERROR) {
4504 LOGE("Cannot find translated frameworkFrameNumber");
4505 assert(0);
4506 } else {
4507 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004508 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004509 } else {
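            // If the result metadata carries ANDROID_SYNC_FRAME_NUMBER, rewrite it
            // with the framework-visible frame number before forwarding the result.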
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004510 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004511 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4512 camera_metadata_entry_t entry;
4513 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4514 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004515 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004516 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4517 if (ret != OK)
4518 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004519 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004520 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004521 result->frame_number = frameworkFrameNumber;
4522 mCallbackOps->process_capture_result(mCallbackOps, result);
4523 }
4524 }
4525}
4526
4527/*===========================================================================
4528 * FUNCTION : orchestrateNotify
4529 *
4530 * DESCRIPTION: Translates the internal frame number and forwards a notify message to camera service
4531 *
4532 * PARAMETERS :
4533 * @notify_msg : notify message to be sent to camera service
4534 *
4535 * RETURN :
4536 *
4537 *==========================================================================*/
4538void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4539{
4540 uint32_t frameworkFrameNumber;
4541 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004542 int32_t rc = NO_ERROR;
4543
4544 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004545 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004546
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004547 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004548 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4549 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4550 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004551 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004552 LOGE("Cannot find translated frameworkFrameNumber");
4553 assert(0);
4554 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004555 }
4556 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004557
4558 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4559 LOGD("Internal Request drop the notifyCb");
4560 } else {
4561 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4562 mCallbackOps->notify(mCallbackOps, notify_msg);
4563 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004564}
4565
4566/*===========================================================================
4567 * FUNCTION : FrameNumberRegistry
4568 *
4569 * DESCRIPTION: Constructor
4570 *
4571 * PARAMETERS :
4572 *
4573 * RETURN :
4574 *
4575 *==========================================================================*/
4576FrameNumberRegistry::FrameNumberRegistry()
4577{
4578 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4579}
4580
4581/*===========================================================================
4582 * FUNCTION : ~FrameNumberRegistry
4583 *
4584 * DESCRIPTION: Destructor
4585 *
4586 * PARAMETERS :
4587 *
4588 * RETURN :
4589 *
4590 *==========================================================================*/
4591FrameNumberRegistry::~FrameNumberRegistry()
4592{
4593}
4594
4595/*===========================================================================
4596 * FUNCTION : purgeOldEntriesLocked
4597 *
4598 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4599 *
4600 * PARAMETERS :
4601 *
4602 * RETURN : NONE
4603 *
4604 *==========================================================================*/
4605void FrameNumberRegistry::purgeOldEntriesLocked()
4606{
4607 while (_register.begin() != _register.end()) {
4608 auto itr = _register.begin();
4609 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4610 _register.erase(itr);
4611 } else {
4612 return;
4613 }
4614 }
4615}
4616
4617/*===========================================================================
4618 * FUNCTION : allocStoreInternalFrameNumber
4619 *
4620 * DESCRIPTION: Method to note down a framework request and associate a new
4621 * internal request number against it
4622 *
4623 * PARAMETERS :
4624 * @fFrameNumber: Identifier given by framework
4625 * @internalFN : Output parameter which will have the newly generated internal
4626 * entry
4627 *
4628 * RETURN : Error code
4629 *
4630 *==========================================================================*/
4631int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4632 uint32_t &internalFrameNumber)
4633{
4634 Mutex::Autolock lock(mRegistryLock);
4635 internalFrameNumber = _nextFreeInternalNumber++;
4636 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4637 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4638 purgeOldEntriesLocked();
4639 return NO_ERROR;
4640}
4641
4642/*===========================================================================
4643 * FUNCTION : generateStoreInternalFrameNumber
4644 *
4645 * DESCRIPTION: Method to generate and store a new internal frame number that is
4646 *              not associated with any framework request
4647 *
4648 * PARAMETERS :
4649 * @internalFrame#: Output parameter which will have the newly generated internal
4650 *                   frame number
4651 *
4652 * RETURN : Error code
4653 *
4654 *==========================================================================*/
4655int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4656{
4657 Mutex::Autolock lock(mRegistryLock);
4658 internalFrameNumber = _nextFreeInternalNumber++;
4659 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4660 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4661 purgeOldEntriesLocked();
4662 return NO_ERROR;
4663}
4664
4665/*===========================================================================
4666 * FUNCTION : getFrameworkFrameNumber
4667 *
4668 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4669 *
4670 * PARAMETERS :
4671 * @internalFrame#: Internal reference
4672 * @frameworkframenumber: Output parameter holding framework frame entry
4673 *
4674 * RETURN : Error code
4675 *
4676 *==========================================================================*/
4677int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4678 uint32_t &frameworkFrameNumber)
4679{
4680 Mutex::Autolock lock(mRegistryLock);
4681 auto itr = _register.find(internalFrameNumber);
4682 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004683 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004684 return -ENOENT;
4685 }
4686
4687 frameworkFrameNumber = itr->second;
4688 purgeOldEntriesLocked();
4689 return NO_ERROR;
4690}
Thierry Strudel3d639192016-09-09 11:52:26 -07004691
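/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from the stream
 *              info of a QCamera3 channel stream: dimensions, format, per-plane
 *              stride/scanline, and the padding needed to reach the full frame
 *              length.
 *
 * PARAMETERS : @config: pbcamera stream configuration to fill
 *              @pbStreamId: pbcamera stream ID to assign
 *              @pbStreamFormat: pbcamera stream format to assign
 *              @channel: channel that owns the stream
 *              @streamIndex: index of the stream within the channel
 *
 * RETURN : OK on success; BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/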
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004692status_t QCamera3HardwareInterface::fillPbStreamConfig(
4693 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4694 QCamera3Channel *channel, uint32_t streamIndex) {
4695 if (config == nullptr) {
4696 LOGE("%s: config is null", __FUNCTION__);
4697 return BAD_VALUE;
4698 }
4699
4700 if (channel == nullptr) {
4701 LOGE("%s: channel is null", __FUNCTION__);
4702 return BAD_VALUE;
4703 }
4704
4705 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4706 if (stream == nullptr) {
4707 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4708 return NAME_NOT_FOUND;
4709 }
4710
4711 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4712 if (streamInfo == nullptr) {
4713 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4714 return NAME_NOT_FOUND;
4715 }
4716
4717 config->id = pbStreamId;
4718 config->image.width = streamInfo->dim.width;
4719 config->image.height = streamInfo->dim.height;
4720 config->image.padding = 0;
4721 config->image.format = pbStreamFormat;
4722
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004723 uint32_t totalPlaneSize = 0;
4724
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004725 // Fill plane information.
4726 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4727 pbcamera::PlaneConfiguration plane;
4728 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4729 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4730 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004731
4732 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004733 }
4734
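    // Padding is whatever the frame length requires beyond the sum of the
    // per-plane sizes (stride * scanline) accumulated above.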
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004735 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004736 return OK;
4737}
4738
Thierry Strudel3d639192016-09-09 11:52:26 -07004739/*===========================================================================
4740 * FUNCTION : processCaptureRequest
4741 *
4742 * DESCRIPTION: process a capture request from camera service
4743 *
4744 * PARAMETERS :
4745 * @request : request from framework to process
4746 *
4747 * RETURN :
4748 *
4749 *==========================================================================*/
4750int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004751 camera3_capture_request_t *request,
4752 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004753{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004754 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004755 int rc = NO_ERROR;
4756 int32_t request_id;
4757 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004758 bool isVidBufRequested = false;
4759 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004760 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004761
4762 pthread_mutex_lock(&mMutex);
4763
4764 // Validate current state
4765 switch (mState) {
4766 case CONFIGURED:
4767 case STARTED:
4768 /* valid state */
4769 break;
4770
4771 case ERROR:
4772 pthread_mutex_unlock(&mMutex);
4773 handleCameraDeviceError();
4774 return -ENODEV;
4775
4776 default:
4777 LOGE("Invalid state %d", mState);
4778 pthread_mutex_unlock(&mMutex);
4779 return -ENODEV;
4780 }
4781
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004782 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004783 if (rc != NO_ERROR) {
4784 LOGE("incoming request is not valid");
4785 pthread_mutex_unlock(&mMutex);
4786 return rc;
4787 }
4788
4789 meta = request->settings;
4790
4791 // For first capture request, send capture intent, and
4792 // stream on all streams
4793 if (mState == CONFIGURED) {
4794 // send an unconfigure to the backend so that the isp
4795 // resources are deallocated
4796 if (!mFirstConfiguration) {
4797 cam_stream_size_info_t stream_config_info;
4798 int32_t hal_version = CAM_HAL_V3;
4799 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4800 stream_config_info.buffer_info.min_buffers =
4801 MIN_INFLIGHT_REQUESTS;
4802 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004803 m_bIs4KVideo ? 0 :
4804 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004805 clear_metadata_buffer(mParameters);
4806 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4807 CAM_INTF_PARM_HAL_VERSION, hal_version);
4808 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4809 CAM_INTF_META_STREAM_INFO, stream_config_info);
4810 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4811 mParameters);
4812 if (rc < 0) {
4813 LOGE("set_parms for unconfigure failed");
4814 pthread_mutex_unlock(&mMutex);
4815 return rc;
4816 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004817
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004819 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004821 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004822 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004823 property_get("persist.camera.is_type", is_type_value, "4");
4824 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4825 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4826 property_get("persist.camera.is_type_preview", is_type_value, "4");
4827 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4828 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004829
4830 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4831 int32_t hal_version = CAM_HAL_V3;
4832 uint8_t captureIntent =
4833 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4834 mCaptureIntent = captureIntent;
4835 clear_metadata_buffer(mParameters);
4836 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4837 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4838 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004839 if (mFirstConfiguration) {
4840 // configure instant AEC
4841 // Instant AEC is a session based parameter and it is needed only
4842 // once per complete session after open camera.
4843 // i.e. This is set only once for the first capture request, after open camera.
4844 setInstantAEC(meta);
4845 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846 uint8_t fwkVideoStabMode=0;
4847 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4848 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4849 }
4850
Xue Tuecac74e2017-04-17 13:58:15 -07004851        // Turn EIS on for video/preview streams only when the EIS setprop is enabled and the stream size supports it
4852 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004853 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004854 int32_t vsMode;
4855 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4856 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4857 rc = BAD_VALUE;
4858 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004859 LOGD("setEis %d", setEis);
4860 bool eis3Supported = false;
4861 size_t count = IS_TYPE_MAX;
4862 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4863 for (size_t i = 0; i < count; i++) {
4864 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4865 eis3Supported = true;
4866 break;
4867 }
4868 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004871        //it could be either IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004872 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4873 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004874 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4875 is_type = isTypePreview;
4876 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4877 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4878 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004880 } else {
4881 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 } else {
4884 is_type = IS_TYPE_NONE;
4885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004886 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004888 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4889 }
4890 }
4891
4892 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4893 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4894
Thierry Strudel54dc9782017-02-15 12:12:10 -08004895 //Disable tintless only if the property is set to 0
4896 memset(prop, 0, sizeof(prop));
4897 property_get("persist.camera.tintless.enable", prop, "1");
4898 int32_t tintless_value = atoi(prop);
4899
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4901 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004902
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 //Disable CDS for HFR mode or if DIS/EIS is on.
4904 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4905 //after every configure_stream
4906 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4907 (m_bIsVideo)) {
4908 int32_t cds = CAM_CDS_MODE_OFF;
4909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4910 CAM_INTF_PARM_CDS_MODE, cds))
4911 LOGE("Failed to disable CDS for HFR mode");
4912
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914
4915 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4916 uint8_t* use_av_timer = NULL;
4917
4918 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004919 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004920 use_av_timer = &m_debug_avtimer;
4921 }
4922 else{
4923 use_av_timer =
4924 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004925 if (use_av_timer) {
4926 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4927 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 }
4929
4930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4931 rc = BAD_VALUE;
4932 }
4933 }
4934
Thierry Strudel3d639192016-09-09 11:52:26 -07004935 setMobicat();
4936
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004937 uint8_t nrMode = 0;
4938 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4939 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4940 }
4941
Thierry Strudel3d639192016-09-09 11:52:26 -07004942 /* Set fps and hfr mode while sending meta stream info so that sensor
4943 * can configure appropriate streaming mode */
4944 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004945 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4946 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004947 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4948 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004949 if (rc == NO_ERROR) {
4950 int32_t max_fps =
4951 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004952 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004953 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4954 }
4955 /* For HFR, more buffers are dequeued upfront to improve the performance */
4956 if (mBatchSize) {
4957 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4958 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4959 }
4960 }
4961 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004962 LOGE("setHalFpsRange failed");
4963 }
4964 }
4965 if (meta.exists(ANDROID_CONTROL_MODE)) {
4966 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4967 rc = extractSceneMode(meta, metaMode, mParameters);
4968 if (rc != NO_ERROR) {
4969 LOGE("extractSceneMode failed");
4970 }
4971 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004972 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004973
Thierry Strudel04e026f2016-10-10 11:27:36 -07004974 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4975 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4976 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4977 rc = setVideoHdrMode(mParameters, vhdr);
4978 if (rc != NO_ERROR) {
4979 LOGE("setVideoHDR is failed");
4980 }
4981 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004982
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004983 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004984 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07004985 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004986 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4988 sensorModeFullFov)) {
4989 rc = BAD_VALUE;
4990 }
4991 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 //TODO: validate the arguments, HSV scenemode should have only the
4993 //advertised fps ranges
4994
4995 /*set the capture intent, hal version, tintless, stream info,
4996      *and DIS enable parameters to the backend*/
4997 LOGD("set_parms META_STREAM_INFO " );
4998 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004999 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5000 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005001 mStreamConfigInfo.type[i],
5002 mStreamConfigInfo.stream_sizes[i].width,
5003 mStreamConfigInfo.stream_sizes[i].height,
5004 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005005 mStreamConfigInfo.format[i],
5006 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5010 mParameters);
5011 if (rc < 0) {
5012 LOGE("set_parms failed for hal version, stream info");
5013 }
5014
Chien-Yu Chenee335912017-02-09 17:53:20 -08005015 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5016 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 if (rc != NO_ERROR) {
5018 LOGE("Failed to get sensor output size");
5019 pthread_mutex_unlock(&mMutex);
5020 goto error_exit;
5021 }
5022
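        // Configure the crop region mapper to translate between the full active
        // pixel array and the active array of the selected sensor mode.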
5023 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5024 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005025 mSensorModeInfo.active_array_size.width,
5026 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005027
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005028 {
5029 Mutex::Autolock l(gHdrPlusClientLock);
5030 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07005031 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005032 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5033 if (rc != OK) {
5034 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5035 mCameraId, mSensorModeInfo.op_pixel_clk);
5036 pthread_mutex_unlock(&mMutex);
5037 goto error_exit;
5038 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08005039 }
5040 }
5041
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 /* Set batchmode before initializing channel. Since registerBuffer
5043 * internally initializes some of the channels, better set batchmode
5044 * even before first register buffer */
5045 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5046 it != mStreamInfo.end(); it++) {
5047 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5048 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5049 && mBatchSize) {
5050 rc = channel->setBatchSize(mBatchSize);
5051 //Disable per frame map unmap for HFR/batchmode case
5052 rc |= channel->setPerFrameMapUnmap(false);
5053 if (NO_ERROR != rc) {
5054 LOGE("Channel init failed %d", rc);
5055 pthread_mutex_unlock(&mMutex);
5056 goto error_exit;
5057 }
5058 }
5059 }
5060
5061 //First initialize all streams
5062 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5063 it != mStreamInfo.end(); it++) {
5064 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005065
5066 /* Initial value of NR mode is needed before stream on */
5067 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005068 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5069 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005070 setEis) {
5071 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5072 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5073 is_type = mStreamConfigInfo.is_type[i];
5074 break;
5075 }
5076 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005077 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005078 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005079 rc = channel->initialize(IS_TYPE_NONE);
5080 }
5081 if (NO_ERROR != rc) {
5082 LOGE("Channel initialization failed %d", rc);
5083 pthread_mutex_unlock(&mMutex);
5084 goto error_exit;
5085 }
5086 }
5087
5088 if (mRawDumpChannel) {
5089 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5090 if (rc != NO_ERROR) {
5091 LOGE("Error: Raw Dump Channel init failed");
5092 pthread_mutex_unlock(&mMutex);
5093 goto error_exit;
5094 }
5095 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005096 if (mHdrPlusRawSrcChannel) {
5097 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5098 if (rc != NO_ERROR) {
5099 LOGE("Error: HDR+ RAW Source Channel init failed");
5100 pthread_mutex_unlock(&mMutex);
5101 goto error_exit;
5102 }
5103 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005104 if (mSupportChannel) {
5105 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5106 if (rc < 0) {
5107 LOGE("Support channel initialization failed");
5108 pthread_mutex_unlock(&mMutex);
5109 goto error_exit;
5110 }
5111 }
5112 if (mAnalysisChannel) {
5113 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5114 if (rc < 0) {
5115 LOGE("Analysis channel initialization failed");
5116 pthread_mutex_unlock(&mMutex);
5117 goto error_exit;
5118 }
5119 }
5120 if (mDummyBatchChannel) {
5121 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5122 if (rc < 0) {
5123 LOGE("mDummyBatchChannel setBatchSize failed");
5124 pthread_mutex_unlock(&mMutex);
5125 goto error_exit;
5126 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005127 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc < 0) {
5129 LOGE("mDummyBatchChannel initialization failed");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133 }
5134
5135 // Set bundle info
5136 rc = setBundleInfo();
5137 if (rc < 0) {
5138 LOGE("setBundleInfo failed %d", rc);
5139 pthread_mutex_unlock(&mMutex);
5140 goto error_exit;
5141 }
5142
5143 //update settings from app here
5144 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5145 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5146 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5147 }
5148 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5149 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5150 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5151 }
5152 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5153 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5154 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5155
5156 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5157 (mLinkedCameraId != mCameraId) ) {
5158 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5159 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005160 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005161 goto error_exit;
5162 }
5163 }
5164
5165 // add bundle related cameras
5166 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5167 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005168 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5169 &m_pDualCamCmdPtr->bundle_info;
5170 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 if (mIsDeviceLinked)
5172 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5173 else
5174 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5175
5176 pthread_mutex_lock(&gCamLock);
5177
5178 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5179 LOGE("Dualcam: Invalid Session Id ");
5180 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005181 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005182 goto error_exit;
5183 }
5184
5185 if (mIsMainCamera == 1) {
5186 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5187 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005188 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005189 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005190 // related session id should be session id of linked session
5191 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5192 } else {
5193 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5194 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005195 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005196 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005197 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5198 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005199 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 pthread_mutex_unlock(&gCamLock);
5201
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005202 rc = mCameraHandle->ops->set_dual_cam_cmd(
5203 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005204 if (rc < 0) {
5205 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005206 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 goto error_exit;
5208 }
5209 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 goto no_error;
5211error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005212 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005213 return rc;
5214no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005215 mWokenUpByDaemon = false;
5216 mPendingLiveRequest = 0;
5217 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005218 }
5219
Chien-Yu Chenee335912017-02-09 17:53:20 -08005220 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chened0a4c92017-05-01 18:25:03 +00005221 {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005222 Mutex::Autolock l(gHdrPlusClientLock);
5223 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5224 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5225 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5226 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5227 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5228 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005229 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005230 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005231 pthread_mutex_unlock(&mMutex);
5232 return rc;
5233 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005234
5235 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005236 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005237 }
5238
Thierry Strudel3d639192016-09-09 11:52:26 -07005239 uint32_t frameNumber = request->frame_number;
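    // Backend stream IDs that should produce a buffer for this request.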
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005240 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005241
5242 if (mFlushPerf) {
5243 //we cannot accept any requests during flush
5244 LOGE("process_capture_request cannot proceed during flush");
5245 pthread_mutex_unlock(&mMutex);
5246 return NO_ERROR; //should return an error
5247 }
5248
5249 if (meta.exists(ANDROID_REQUEST_ID)) {
5250 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5251 mCurrentRequestId = request_id;
5252 LOGD("Received request with id: %d", request_id);
5253 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5254 LOGE("Unable to find request id field, \
5255 & no previous id available");
5256 pthread_mutex_unlock(&mMutex);
5257 return NAME_NOT_FOUND;
5258 } else {
5259 LOGD("Re-using old request id");
5260 request_id = mCurrentRequestId;
5261 }
5262
5263 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5264 request->num_output_buffers,
5265 request->input_buffer,
5266 frameNumber);
5267 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005268 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005269 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005270 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005271 uint32_t snapshotStreamId = 0;
5272 for (size_t i = 0; i < request->num_output_buffers; i++) {
5273 const camera3_stream_buffer_t& output = request->output_buffers[i];
5274 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5275
Emilian Peev7650c122017-01-19 08:24:33 -08005276 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5277 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005278 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 blob_request = 1;
5280 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5281 }
5282
5283 if (output.acquire_fence != -1) {
5284 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5285 close(output.acquire_fence);
5286 if (rc != OK) {
5287 LOGE("sync wait failed %d", rc);
5288 pthread_mutex_unlock(&mMutex);
5289 return rc;
5290 }
5291 }
5292
Emilian Peev0f3c3162017-03-15 12:57:46 +00005293 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5294 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005295 depthRequestPresent = true;
5296 continue;
5297 }
5298
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005299 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005300 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005301
5302 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5303 isVidBufRequested = true;
5304 }
5305 }
5306
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005307     //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5308 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5309 itr++) {
5310 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5311 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5312 channel->getStreamID(channel->getStreamTypeMask());
5313
5314 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5315 isVidBufRequested = true;
5316 }
5317 }
5318
Thierry Strudel3d639192016-09-09 11:52:26 -07005319 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005320 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005321 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005322 }
5323 if (blob_request && mRawDumpChannel) {
5324 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005325 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005326 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005327 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005328 }
5329
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005330 {
5331 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5332 // Request a RAW buffer if
5333 // 1. mHdrPlusRawSrcChannel is valid.
 5334         // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5335 // 3. There is no pending HDR+ request.
5336 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5337 mHdrPlusPendingRequests.size() == 0) {
5338 streamsArray.stream_request[streamsArray.num_streams].streamID =
5339 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5340 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5341 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005342 }
5343
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005344 //extract capture intent
5345 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5346 mCaptureIntent =
5347 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5348 }
5349
5350 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5351 mCacMode =
5352 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5353 }
5354
5355 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005356 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005357
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005358 {
5359 Mutex::Autolock l(gHdrPlusClientLock);
5360 // If this request has a still capture intent, try to submit an HDR+ request.
5361 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5362 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5363 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5364 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005365 }
5366
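    // From here the request takes one of three paths:
    //  - HDR+ request: only the frame parameters are set; the capture itself is
    //    handled through the HDR+ client.
    //  - Regular request (no input buffer): parse and set the per-request settings
    //    (or per-batch settings in HFR batch mode).
    //  - Reprocess request (input buffer present): wait on the input buffer's
    //    acquire fence before queueing.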
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005367 if (hdrPlusRequest) {
5368 // For a HDR+ request, just set the frame parameters.
5369 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5370 if (rc < 0) {
5371 LOGE("fail to set frame parameters");
5372 pthread_mutex_unlock(&mMutex);
5373 return rc;
5374 }
5375 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005376 /* Parse the settings:
5377 * - For every request in NORMAL MODE
5378 * - For every request in HFR mode during preview only case
5379 * - For first request of every batch in HFR mode during video
5380 * recording. In batchmode the same settings except frame number is
5381 * repeated in each request of the batch.
5382 */
5383 if (!mBatchSize ||
5384 (mBatchSize && !isVidBufRequested) ||
5385 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005386 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005387 if (rc < 0) {
5388 LOGE("fail to set frame parameters");
5389 pthread_mutex_unlock(&mMutex);
5390 return rc;
5391 }
5392 }
 5393         /* For batch mode HFR, setFrameParameters is not called for every
 5394          * request; only the frame number of the latest request is parsed.
 5395          * Keep track of the first and last frame numbers in a batch so that
 5396          * metadata for all frame numbers of the batch can be duplicated in
 5397          * handleBatchMetadata */
5398 if (mBatchSize) {
5399 if (!mToBeQueuedVidBufs) {
5400 //start of the batch
5401 mFirstFrameNumberInBatch = request->frame_number;
5402 }
5403 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5404 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5405 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005406 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005407 return BAD_VALUE;
5408 }
5409 }
5410 if (mNeedSensorRestart) {
5411 /* Unlock the mutex as restartSensor waits on the channels to be
5412 * stopped, which in turn calls stream callback functions -
5413 * handleBufferWithLock and handleMetadataWithLock */
5414 pthread_mutex_unlock(&mMutex);
5415 rc = dynamicUpdateMetaStreamInfo();
5416 if (rc != NO_ERROR) {
5417 LOGE("Restarting the sensor failed");
5418 return BAD_VALUE;
5419 }
5420 mNeedSensorRestart = false;
5421 pthread_mutex_lock(&mMutex);
5422 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005423 if(mResetInstantAEC) {
5424 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5425 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5426 mResetInstantAEC = false;
5427 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005428 } else {
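        // Reprocess request: make sure the input buffer is ready before it is used.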
Thierry Strudel3d639192016-09-09 11:52:26 -07005429 if (request->input_buffer->acquire_fence != -1) {
5430 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5431 close(request->input_buffer->acquire_fence);
5432 if (rc != OK) {
5433 LOGE("input buffer sync wait failed %d", rc);
5434 pthread_mutex_unlock(&mMutex);
5435 return rc;
5436 }
5437 }
5438 }
5439
5440 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5441 mLastCustIntentFrmNum = frameNumber;
5442 }
5443 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005445 pendingRequestIterator latestRequest;
5446 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005447 pendingRequest.num_buffers = depthRequestPresent ?
5448 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005449 pendingRequest.request_id = request_id;
5450 pendingRequest.blob_request = blob_request;
5451 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005452 if (request->input_buffer) {
5453 pendingRequest.input_buffer =
5454 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5455 *(pendingRequest.input_buffer) = *(request->input_buffer);
5456 pInputBuffer = pendingRequest.input_buffer;
5457 } else {
5458 pendingRequest.input_buffer = NULL;
5459 pInputBuffer = NULL;
5460 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005461 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005462
5463 pendingRequest.pipeline_depth = 0;
5464 pendingRequest.partial_result_cnt = 0;
5465 extractJpegMetadata(mCurJpegMeta, request);
5466 pendingRequest.jpegMetadata = mCurJpegMeta;
5467 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5468 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005469 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005470 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5471 mHybridAeEnable =
5472 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5473 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005474
5475 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5476 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005477 /* DevCamDebug metadata processCaptureRequest */
5478 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5479 mDevCamDebugMetaEnable =
5480 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5481 }
5482 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5483 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005484
5485 //extract CAC info
5486 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5487 mCacMode =
5488 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5489 }
5490 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005491 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005492
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005493 // extract enableZsl info
5494 if (gExposeEnableZslKey) {
5495 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5496 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5497 mZslEnabled = pendingRequest.enableZsl;
5498 } else {
5499 pendingRequest.enableZsl = mZslEnabled;
5500 }
5501 }
5502
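    // Start a bookkeeping entry for this request's buffers; it is added to
    // mPendingBuffersMap below and consulted when buffers are returned or flushed.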
Thierry Strudel3d639192016-09-09 11:52:26 -07005503 PendingBuffersInRequest bufsForCurRequest;
5504 bufsForCurRequest.frame_number = frameNumber;
5505 // Mark current timestamp for the new request
5506 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005507 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005508
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005509 if (hdrPlusRequest) {
5510 // Save settings for this request.
5511 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5512 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5513
5514 // Add to pending HDR+ request queue.
5515 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5516 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5517
5518 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5519 }
5520
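    // Track every output buffer of this request except depth blobs, which are
    // skipped here and mapped through the depth channel instead.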
Thierry Strudel3d639192016-09-09 11:52:26 -07005521 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005522 if ((request->output_buffers[i].stream->data_space ==
5523 HAL_DATASPACE_DEPTH) &&
5524 (HAL_PIXEL_FORMAT_BLOB ==
5525 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005526 continue;
5527 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005528 RequestedBufferInfo requestedBuf;
5529 memset(&requestedBuf, 0, sizeof(requestedBuf));
5530 requestedBuf.stream = request->output_buffers[i].stream;
5531 requestedBuf.buffer = NULL;
5532 pendingRequest.buffers.push_back(requestedBuf);
5533
 5534         // Add the buffer handle to the pending buffers list
5535 PendingBufferInfo bufferInfo;
5536 bufferInfo.buffer = request->output_buffers[i].buffer;
5537 bufferInfo.stream = request->output_buffers[i].stream;
5538 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5539 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5540 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5541 frameNumber, bufferInfo.buffer,
5542 channel->getStreamTypeMask(), bufferInfo.stream->format);
5543 }
5544 // Add this request packet into mPendingBuffersMap
5545 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5546 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5547 mPendingBuffersMap.get_num_overall_buffers());
5548
5549 latestRequest = mPendingRequestsList.insert(
5550 mPendingRequestsList.end(), pendingRequest);
5551 if(mFlush) {
5552 LOGI("mFlush is true");
5553 pthread_mutex_unlock(&mMutex);
5554 return NO_ERROR;
5555 }
5556
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005557 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5558 // channel.
5559 if (!hdrPlusRequest) {
5560 int indexUsed;
5561 // Notify metadata channel we receive a request
5562 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005563
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005564 if(request->input_buffer != NULL){
5565 LOGD("Input request, frame_number %d", frameNumber);
5566 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5567 if (NO_ERROR != rc) {
5568 LOGE("fail to set reproc parameters");
5569 pthread_mutex_unlock(&mMutex);
5570 return rc;
5571 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005572 }
5573
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 // Call request on other streams
5575 uint32_t streams_need_metadata = 0;
5576 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5577 for (size_t i = 0; i < request->num_output_buffers; i++) {
5578 const camera3_stream_buffer_t& output = request->output_buffers[i];
5579 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5580
5581 if (channel == NULL) {
5582 LOGW("invalid channel pointer for stream");
5583 continue;
5584 }
5585
5586 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5587 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5588 output.buffer, request->input_buffer, frameNumber);
5589 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005590 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005591 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5592 if (rc < 0) {
5593 LOGE("Fail to request on picture channel");
5594 pthread_mutex_unlock(&mMutex);
5595 return rc;
5596 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005597 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005598 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5599 assert(NULL != mDepthChannel);
5600 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005601
Emilian Peev7650c122017-01-19 08:24:33 -08005602 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5603 if (rc < 0) {
5604 LOGE("Fail to map on depth buffer");
5605 pthread_mutex_unlock(&mMutex);
5606 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005607 }
Emilian Peev7650c122017-01-19 08:24:33 -08005608 } else {
5609 LOGD("snapshot request with buffer %p, frame_number %d",
5610 output.buffer, frameNumber);
5611 if (!request->settings) {
5612 rc = channel->request(output.buffer, frameNumber,
5613 NULL, mPrevParameters, indexUsed);
5614 } else {
5615 rc = channel->request(output.buffer, frameNumber,
5616 NULL, mParameters, indexUsed);
5617 }
5618 if (rc < 0) {
5619 LOGE("Fail to request on picture channel");
5620 pthread_mutex_unlock(&mMutex);
5621 return rc;
5622 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005623
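                        // Remember which buffer index the channel assigned for this
                        // stream so streamsArray carries it to the backend; constrained
                        // high-speed mode keeps the index free-running instead.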
Emilian Peev7650c122017-01-19 08:24:33 -08005624 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5625 uint32_t j = 0;
5626 for (j = 0; j < streamsArray.num_streams; j++) {
5627 if (streamsArray.stream_request[j].streamID == streamId) {
5628 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5629 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5630 else
5631 streamsArray.stream_request[j].buf_index = indexUsed;
5632 break;
5633 }
5634 }
5635 if (j == streamsArray.num_streams) {
5636 LOGE("Did not find matching stream to update index");
5637 assert(0);
5638 }
5639
5640 pendingBufferIter->need_metadata = true;
5641 streams_need_metadata++;
5642 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005644 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5645 bool needMetadata = false;
5646 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5647 rc = yuvChannel->request(output.buffer, frameNumber,
5648 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5649 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005650 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005651 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005652 pthread_mutex_unlock(&mMutex);
5653 return rc;
5654 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005655
5656 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5657 uint32_t j = 0;
5658 for (j = 0; j < streamsArray.num_streams; j++) {
5659 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005660 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5661 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5662 else
5663 streamsArray.stream_request[j].buf_index = indexUsed;
5664 break;
5665 }
5666 }
5667 if (j == streamsArray.num_streams) {
5668 LOGE("Did not find matching stream to update index");
5669 assert(0);
5670 }
5671
5672 pendingBufferIter->need_metadata = needMetadata;
5673 if (needMetadata)
5674 streams_need_metadata += 1;
5675 LOGD("calling YUV channel request, need_metadata is %d",
5676 needMetadata);
5677 } else {
5678 LOGD("request with buffer %p, frame_number %d",
5679 output.buffer, frameNumber);
5680
5681 rc = channel->request(output.buffer, frameNumber, indexUsed);
5682
5683 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5684 uint32_t j = 0;
5685 for (j = 0; j < streamsArray.num_streams; j++) {
5686 if (streamsArray.stream_request[j].streamID == streamId) {
5687 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5688 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5689 else
5690 streamsArray.stream_request[j].buf_index = indexUsed;
5691 break;
5692 }
5693 }
5694 if (j == streamsArray.num_streams) {
5695 LOGE("Did not find matching stream to update index");
5696 assert(0);
5697 }
5698
5699 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5700 && mBatchSize) {
5701 mToBeQueuedVidBufs++;
5702 if (mToBeQueuedVidBufs == mBatchSize) {
5703 channel->queueBatchBuf();
5704 }
5705 }
5706 if (rc < 0) {
5707 LOGE("request failed");
5708 pthread_mutex_unlock(&mMutex);
5709 return rc;
5710 }
5711 }
5712 pendingBufferIter++;
5713 }
5714
5715 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5716 itr++) {
5717 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5718
5719 if (channel == NULL) {
5720 LOGE("invalid channel pointer for stream");
5721 assert(0);
5722 return BAD_VALUE;
5723 }
5724
5725 InternalRequest requestedStream;
5726 requestedStream = (*itr);
5727
5728
5729 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5730 LOGD("snapshot request internally input buffer %p, frame_number %d",
5731 request->input_buffer, frameNumber);
5732 if(request->input_buffer != NULL){
5733 rc = channel->request(NULL, frameNumber,
5734 pInputBuffer, &mReprocMeta, indexUsed, true,
5735 requestedStream.meteringOnly);
5736 if (rc < 0) {
5737 LOGE("Fail to request on picture channel");
5738 pthread_mutex_unlock(&mMutex);
5739 return rc;
5740 }
5741 } else {
5742 LOGD("snapshot request with frame_number %d", frameNumber);
5743 if (!request->settings) {
5744 rc = channel->request(NULL, frameNumber,
5745 NULL, mPrevParameters, indexUsed, true,
5746 requestedStream.meteringOnly);
5747 } else {
5748 rc = channel->request(NULL, frameNumber,
5749 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5750 }
5751 if (rc < 0) {
5752 LOGE("Fail to request on picture channel");
5753 pthread_mutex_unlock(&mMutex);
5754 return rc;
5755 }
5756
5757 if ((*itr).meteringOnly != 1) {
5758 requestedStream.need_metadata = 1;
5759 streams_need_metadata++;
5760 }
5761 }
5762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005779 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005780 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005781 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005782 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005783 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005784 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005785 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005786
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005787 //If 2 streams have need_metadata set to true, fail the request, unless
5788 //we copy/reference count the metadata buffer
5789 if (streams_need_metadata > 1) {
 5790             LOGE("not supporting request in which two streams require"
 5791                 " HAL metadata for reprocessing");
5792 pthread_mutex_unlock(&mMutex);
5793 return -EINVAL;
5794 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005795
Emilian Peev7650c122017-01-19 08:24:33 -08005796 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5798 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5799 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5800 pthread_mutex_unlock(&mMutex);
5801 return BAD_VALUE;
5802 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005803 if (request->input_buffer == NULL) {
5804 /* Set the parameters to backend:
5805 * - For every request in NORMAL MODE
5806 * - For every request in HFR mode during preview only case
5807 * - Once every batch in HFR mode during video recording
5808 */
5809 if (!mBatchSize ||
5810 (mBatchSize && !isVidBufRequested) ||
5811 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5812 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5813 mBatchSize, isVidBufRequested,
5814 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005815
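            // Fold this request's streams into mBatchedStreamsArray (deduplicated);
            // the accumulated list for the whole batch is what gets sent to the
            // backend below.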
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005816 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5817 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5818 uint32_t m = 0;
5819 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5820 if (streamsArray.stream_request[k].streamID ==
5821 mBatchedStreamsArray.stream_request[m].streamID)
5822 break;
5823 }
5824 if (m == mBatchedStreamsArray.num_streams) {
5825 mBatchedStreamsArray.stream_request\
5826 [mBatchedStreamsArray.num_streams].streamID =
5827 streamsArray.stream_request[k].streamID;
5828 mBatchedStreamsArray.stream_request\
5829 [mBatchedStreamsArray.num_streams].buf_index =
5830 streamsArray.stream_request[k].buf_index;
5831 mBatchedStreamsArray.num_streams =
5832 mBatchedStreamsArray.num_streams + 1;
5833 }
5834 }
5835 streamsArray = mBatchedStreamsArray;
5836 }
5837 /* Update stream id of all the requested buffers */
5838 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5839 streamsArray)) {
5840 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005841 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005842 return BAD_VALUE;
5843 }
5844
5845 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5846 mParameters);
5847 if (rc < 0) {
5848 LOGE("set_parms failed");
5849 }
 5850             /* reset to zero because the batch has been queued */
5851 mToBeQueuedVidBufs = 0;
5852 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5853 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5854 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005855 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5856 uint32_t m = 0;
5857 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5858 if (streamsArray.stream_request[k].streamID ==
5859 mBatchedStreamsArray.stream_request[m].streamID)
5860 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005861 }
5862 if (m == mBatchedStreamsArray.num_streams) {
5863 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5864 streamID = streamsArray.stream_request[k].streamID;
5865 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5866 buf_index = streamsArray.stream_request[k].buf_index;
5867 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5868 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005869 }
5870 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005871 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005872
5873 // Start all streams after the first setting is sent, so that the
5874 // setting can be applied sooner: (0 + apply_delay)th frame.
5875 if (mState == CONFIGURED && mChannelHandle) {
 5876             // Start the metadata channel first, then the remaining channels.
5877 LOGH("Start META Channel");
5878 rc = mMetadataChannel->start();
5879 if (rc < 0) {
5880 LOGE("META channel start failed");
5881 pthread_mutex_unlock(&mMutex);
5882 return rc;
5883 }
5884
5885 if (mAnalysisChannel) {
5886 rc = mAnalysisChannel->start();
5887 if (rc < 0) {
5888 LOGE("Analysis channel start failed");
5889 mMetadataChannel->stop();
5890 pthread_mutex_unlock(&mMutex);
5891 return rc;
5892 }
5893 }
5894
5895 if (mSupportChannel) {
5896 rc = mSupportChannel->start();
5897 if (rc < 0) {
5898 LOGE("Support channel start failed");
5899 mMetadataChannel->stop();
5900 /* Although support and analysis are mutually exclusive today
 5901                            adding it in any case for future proofing */
5902 if (mAnalysisChannel) {
5903 mAnalysisChannel->stop();
5904 }
5905 pthread_mutex_unlock(&mMutex);
5906 return rc;
5907 }
5908 }
5909 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5910 it != mStreamInfo.end(); it++) {
5911 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5912 LOGH("Start Processing Channel mask=%d",
5913 channel->getStreamTypeMask());
5914 rc = channel->start();
5915 if (rc < 0) {
5916 LOGE("channel start failed");
5917 pthread_mutex_unlock(&mMutex);
5918 return rc;
5919 }
5920 }
5921
5922 if (mRawDumpChannel) {
5923 LOGD("Starting raw dump stream");
5924 rc = mRawDumpChannel->start();
5925 if (rc != NO_ERROR) {
5926 LOGE("Error Starting Raw Dump Channel");
5927 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5928 it != mStreamInfo.end(); it++) {
5929 QCamera3Channel *channel =
5930 (QCamera3Channel *)(*it)->stream->priv;
5931 LOGH("Stopping Processing Channel mask=%d",
5932 channel->getStreamTypeMask());
5933 channel->stop();
5934 }
5935 if (mSupportChannel)
5936 mSupportChannel->stop();
5937 if (mAnalysisChannel) {
5938 mAnalysisChannel->stop();
5939 }
5940 mMetadataChannel->stop();
5941 pthread_mutex_unlock(&mMutex);
5942 return rc;
5943 }
5944 }
5945
5946 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5947 mChannelHandle);
5948 if (rc != NO_ERROR) {
5949 LOGE("start_channel failed %d", rc);
5950 pthread_mutex_unlock(&mMutex);
5951 return rc;
5952 }
5953 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005954 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005955 }
5956
5957 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5958
5959 mState = STARTED;
 5960     // Use a timed condition wait so a stalled pipeline cannot block this request forever
5961 struct timespec ts;
5962 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005963 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005964 if (rc < 0) {
5965 isValidTimeout = 0;
 5966         LOGE("Error reading the monotonic clock!");
5967 }
5968 else {
 5969         // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005970 int64_t timeout = 5;
5971 {
5972 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5973 // If there is a pending HDR+ request, the following requests may be blocked until the
5974 // HDR+ request is done. So allow a longer timeout.
5975 if (mHdrPlusPendingRequests.size() > 0) {
5976 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5977 }
5978 }
5979 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005980 }
5981 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005982 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005983 (mState != ERROR) && (mState != DEINIT)) {
5984 if (!isValidTimeout) {
5985 LOGD("Blocking on conditional wait");
5986 pthread_cond_wait(&mRequestCond, &mMutex);
5987 }
5988 else {
5989 LOGD("Blocking on timed conditional wait");
5990 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5991 if (rc == ETIMEDOUT) {
5992 rc = -ENODEV;
5993 LOGE("Unblocked on timeout!!!!");
5994 break;
5995 }
5996 }
5997 LOGD("Unblocked");
5998 if (mWokenUpByDaemon) {
5999 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006000 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006001 break;
6002 }
6003 }
6004 pthread_mutex_unlock(&mMutex);
6005
6006 return rc;
6007}
6008
6009/*===========================================================================
6010 * FUNCTION : dump
6011 *
 6012 * DESCRIPTION: Dump the HAL3 state (pending requests, pending buffers and
 6013 *              pending frame drops) to the given file descriptor
 6014 * PARAMETERS :
 6015 *   @fd : file descriptor to write the dump output to
 6016 *
 6017 * RETURN : None
6018 *==========================================================================*/
6019void QCamera3HardwareInterface::dump(int fd)
6020{
6021 pthread_mutex_lock(&mMutex);
6022 dprintf(fd, "\n Camera HAL3 information Begin \n");
6023
6024 dprintf(fd, "\nNumber of pending requests: %zu \n",
6025 mPendingRequestsList.size());
6026 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6027 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6028 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6029 for(pendingRequestIterator i = mPendingRequestsList.begin();
6030 i != mPendingRequestsList.end(); i++) {
6031 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6032 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6033 i->input_buffer);
6034 }
6035 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6036 mPendingBuffersMap.get_num_overall_buffers());
6037 dprintf(fd, "-------+------------------\n");
6038 dprintf(fd, " Frame | Stream type mask \n");
6039 dprintf(fd, "-------+------------------\n");
6040 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6041 for(auto &j : req.mPendingBufferList) {
6042 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6043 dprintf(fd, " %5d | %11d \n",
6044 req.frame_number, channel->getStreamTypeMask());
6045 }
6046 }
6047 dprintf(fd, "-------+------------------\n");
6048
6049 dprintf(fd, "\nPending frame drop list: %zu\n",
6050 mPendingFrameDropList.size());
6051 dprintf(fd, "-------+-----------\n");
6052 dprintf(fd, " Frame | Stream ID \n");
6053 dprintf(fd, "-------+-----------\n");
6054 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6055 i != mPendingFrameDropList.end(); i++) {
6056 dprintf(fd, " %5d | %9d \n",
6057 i->frame_number, i->stream_ID);
6058 }
6059 dprintf(fd, "-------+-----------\n");
6060
6061 dprintf(fd, "\n Camera HAL3 information End \n");
6062
6063 /* use dumpsys media.camera as trigger to send update debug level event */
6064 mUpdateDebugLevel = true;
6065 pthread_mutex_unlock(&mMutex);
6066 return;
6067}
6068
6069/*===========================================================================
6070 * FUNCTION : flush
6071 *
6072 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6073 * conditionally restarts channels
6074 *
6075 * PARAMETERS :
6076 * @ restartChannels: re-start all channels
6077 *
6078 *
6079 * RETURN :
6080 * 0 on success
6081 * Error code on failure
6082 *==========================================================================*/
6083int QCamera3HardwareInterface::flush(bool restartChannels)
6084{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006085 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006086 int32_t rc = NO_ERROR;
6087
6088 LOGD("Unblocking Process Capture Request");
6089 pthread_mutex_lock(&mMutex);
6090 mFlush = true;
6091 pthread_mutex_unlock(&mMutex);
6092
6093 rc = stopAllChannels();
6094 // unlink of dualcam
6095 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006096 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6097 &m_pDualCamCmdPtr->bundle_info;
6098 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006099 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6100 pthread_mutex_lock(&gCamLock);
6101
6102 if (mIsMainCamera == 1) {
6103 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6104 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006105 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006106 // related session id should be session id of linked session
6107 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6108 } else {
6109 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6110 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006111 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006112 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6113 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006114 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006115 pthread_mutex_unlock(&gCamLock);
6116
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006117 rc = mCameraHandle->ops->set_dual_cam_cmd(
6118 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006119 if (rc < 0) {
6120 LOGE("Dualcam: Unlink failed, but still proceed to close");
6121 }
6122 }
6123
6124 if (rc < 0) {
6125 LOGE("stopAllChannels failed");
6126 return rc;
6127 }
6128 if (mChannelHandle) {
6129 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6130 mChannelHandle);
6131 }
6132
6133 // Reset bundle info
6134 rc = setBundleInfo();
6135 if (rc < 0) {
6136 LOGE("setBundleInfo failed %d", rc);
6137 return rc;
6138 }
6139
6140 // Mutex Lock
6141 pthread_mutex_lock(&mMutex);
6142
6143 // Unblock process_capture_request
6144 mPendingLiveRequest = 0;
6145 pthread_cond_signal(&mRequestCond);
6146
6147 rc = notifyErrorForPendingRequests();
6148 if (rc < 0) {
6149 LOGE("notifyErrorForPendingRequests failed");
6150 pthread_mutex_unlock(&mMutex);
6151 return rc;
6152 }
6153
6154 mFlush = false;
6155
6156 // Start the Streams/Channels
6157 if (restartChannels) {
6158 rc = startAllChannels();
6159 if (rc < 0) {
6160 LOGE("startAllChannels failed");
6161 pthread_mutex_unlock(&mMutex);
6162 return rc;
6163 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006164 if (mChannelHandle) {
6165 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6166 mChannelHandle);
6167 if (rc < 0) {
6168 LOGE("start_channel failed");
6169 pthread_mutex_unlock(&mMutex);
6170 return rc;
6171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006172 }
6173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006174 pthread_mutex_unlock(&mMutex);
6175
6176 return 0;
6177}
6178
6179/*===========================================================================
6180 * FUNCTION : flushPerf
6181 *
6182 * DESCRIPTION: This is the performance optimization version of flush that does
6183 * not use stream off, rather flushes the system
6184 *
6185 * PARAMETERS :
6186 *
6187 *
6188 * RETURN : 0 : success
6189 * -EINVAL: input is malformed (device is not valid)
6190 * -ENODEV: if the device has encountered a serious error
6191 *==========================================================================*/
6192int QCamera3HardwareInterface::flushPerf()
6193{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006194 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006195 int32_t rc = 0;
6196 struct timespec timeout;
6197 bool timed_wait = false;
6198
6199 pthread_mutex_lock(&mMutex);
6200 mFlushPerf = true;
6201 mPendingBuffersMap.numPendingBufsAtFlush =
6202 mPendingBuffersMap.get_num_overall_buffers();
6203 LOGD("Calling flush. Wait for %d buffers to return",
6204 mPendingBuffersMap.numPendingBufsAtFlush);
6205
6206 /* send the flush event to the backend */
6207 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6208 if (rc < 0) {
6209 LOGE("Error in flush: IOCTL failure");
6210 mFlushPerf = false;
6211 pthread_mutex_unlock(&mMutex);
6212 return -ENODEV;
6213 }
6214
6215 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6216 LOGD("No pending buffers in HAL, return flush");
6217 mFlushPerf = false;
6218 pthread_mutex_unlock(&mMutex);
6219 return rc;
6220 }
6221
6222 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006223 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006224 if (rc < 0) {
 6225         LOGE("Error reading the monotonic clock, cannot use timed wait");
6226 } else {
6227 timeout.tv_sec += FLUSH_TIMEOUT;
6228 timed_wait = true;
6229 }
6230
6231 //Block on conditional variable
6232 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6233 LOGD("Waiting on mBuffersCond");
6234 if (!timed_wait) {
6235 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6236 if (rc != 0) {
6237 LOGE("pthread_cond_wait failed due to rc = %s",
6238 strerror(rc));
6239 break;
6240 }
6241 } else {
6242 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6243 if (rc != 0) {
6244 LOGE("pthread_cond_timedwait failed due to rc = %s",
6245 strerror(rc));
6246 break;
6247 }
6248 }
6249 }
6250 if (rc != 0) {
6251 mFlushPerf = false;
6252 pthread_mutex_unlock(&mMutex);
6253 return -ENODEV;
6254 }
6255
6256 LOGD("Received buffers, now safe to return them");
6257
6258 //make sure the channels handle flush
6259 //currently only required for the picture channel to release snapshot resources
6260 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6261 it != mStreamInfo.end(); it++) {
6262 QCamera3Channel *channel = (*it)->channel;
6263 if (channel) {
6264 rc = channel->flush();
6265 if (rc) {
6266 LOGE("Flushing the channels failed with error %d", rc);
6267 // even though the channel flush failed we need to continue and
6268 // return the buffers we have to the framework, however the return
6269 // value will be an error
6270 rc = -ENODEV;
6271 }
6272 }
6273 }
6274
6275 /* notify the frameworks and send errored results */
6276 rc = notifyErrorForPendingRequests();
6277 if (rc < 0) {
6278 LOGE("notifyErrorForPendingRequests failed");
6279 pthread_mutex_unlock(&mMutex);
6280 return rc;
6281 }
6282
6283 //unblock process_capture_request
6284 mPendingLiveRequest = 0;
6285 unblockRequestIfNecessary();
6286
6287 mFlushPerf = false;
6288 pthread_mutex_unlock(&mMutex);
6289 LOGD ("Flush Operation complete. rc = %d", rc);
6290 return rc;
6291}
6292
6293/*===========================================================================
6294 * FUNCTION : handleCameraDeviceError
6295 *
6296 * DESCRIPTION: This function calls internal flush and notifies the error to
6297 * framework and updates the state variable.
6298 *
6299 * PARAMETERS : None
6300 *
6301 * RETURN : NO_ERROR on Success
6302 * Error code on failure
6303 *==========================================================================*/
6304int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6305{
6306 int32_t rc = NO_ERROR;
6307
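    // Serialize with other flush paths; nothing to do unless the HAL is in ERROR.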
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006308 {
6309 Mutex::Autolock lock(mFlushLock);
6310 pthread_mutex_lock(&mMutex);
6311 if (mState != ERROR) {
6312 //if mState != ERROR, nothing to be done
6313 pthread_mutex_unlock(&mMutex);
6314 return NO_ERROR;
6315 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006316 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006317
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006318 rc = flush(false /* restart channels */);
6319 if (NO_ERROR != rc) {
6320 LOGE("internal flush to handle mState = ERROR failed");
6321 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006322
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006323 pthread_mutex_lock(&mMutex);
6324 mState = DEINIT;
6325 pthread_mutex_unlock(&mMutex);
6326 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006327
6328 camera3_notify_msg_t notify_msg;
6329 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6330 notify_msg.type = CAMERA3_MSG_ERROR;
6331 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6332 notify_msg.message.error.error_stream = NULL;
6333 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006334 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006335
6336 return rc;
6337}
6338
6339/*===========================================================================
6340 * FUNCTION : captureResultCb
6341 *
6342 * DESCRIPTION: Callback handler for all capture result
6343 * (streams, as well as metadata)
6344 *
6345 * PARAMETERS :
6346 * @metadata : metadata information
6347 * @buffer : actual gralloc buffer to be returned to frameworks.
6348 * NULL if metadata.
6349 *
6350 * RETURN : NONE
6351 *==========================================================================*/
6352void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6353 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6354{
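    // Dispatch on result type: batched metadata, single-frame metadata, an input
    // buffer completion, or an output buffer.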
6355 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006356 pthread_mutex_lock(&mMutex);
6357 uint8_t batchSize = mBatchSize;
6358 pthread_mutex_unlock(&mMutex);
6359 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006360 handleBatchMetadata(metadata_buf,
6361 true /* free_and_bufdone_meta_buf */);
6362 } else { /* mBatchSize = 0 */
6363 hdrPlusPerfLock(metadata_buf);
6364 pthread_mutex_lock(&mMutex);
6365 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006366 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006367 true /* last urgent frame of batch metadata */,
6368 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006369 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006370 pthread_mutex_unlock(&mMutex);
6371 }
6372 } else if (isInputBuffer) {
6373 pthread_mutex_lock(&mMutex);
6374 handleInputBufferWithLock(frame_number);
6375 pthread_mutex_unlock(&mMutex);
6376 } else {
6377 pthread_mutex_lock(&mMutex);
6378 handleBufferWithLock(buffer, frame_number);
6379 pthread_mutex_unlock(&mMutex);
6380 }
6381 return;
6382}
6383
6384/*===========================================================================
6385 * FUNCTION : getReprocessibleOutputStreamId
6386 *
6387 * DESCRIPTION: Get source output stream id for the input reprocess stream
6388 * based on size and format, which would be the largest
6389 * output stream if an input stream exists.
6390 *
6391 * PARAMETERS :
6392 * @id : return the stream id if found
6393 *
6394 * RETURN : int32_t type of status
6395 * NO_ERROR -- success
 6396 * non-zero failure code
6397 *==========================================================================*/
6398int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6399{
6400 /* check if any output or bidirectional stream with the same size and format
6401 and return that stream */
6402 if ((mInputStreamInfo.dim.width > 0) &&
6403 (mInputStreamInfo.dim.height > 0)) {
6404 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6405 it != mStreamInfo.end(); it++) {
6406
6407 camera3_stream_t *stream = (*it)->stream;
6408 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6409 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6410 (stream->format == mInputStreamInfo.format)) {
6411 // Usage flag for an input stream and the source output stream
6412 // may be different.
6413 LOGD("Found reprocessible output stream! %p", *it);
6414 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6415 stream->usage, mInputStreamInfo.usage);
6416
6417 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6418 if (channel != NULL && channel->mStreams[0]) {
6419 id = channel->mStreams[0]->getMyServerID();
6420 return NO_ERROR;
6421 }
6422 }
6423 }
6424 } else {
6425 LOGD("No input stream, so no reprocessible output stream");
6426 }
6427 return NAME_NOT_FOUND;
6428}
6429
6430/*===========================================================================
6431 * FUNCTION : lookupFwkName
6432 *
 6433 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6434 * make sure the parameter is correctly propagated
6435 *
6436 * PARAMETERS :
6437 * @arr : map between the two enums
6438 * @len : len of the map
6439 * @hal_name : name of the hal_parm to map
6440 *
6441 * RETURN : int type of status
6442 * fwk_name -- success
6443 * none-zero failure code
 6444 * non-zero failure code
6445template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6446 size_t len, halType hal_name)
6447{
6448
6449 for (size_t i = 0; i < len; i++) {
6450 if (arr[i].hal_name == hal_name) {
6451 return arr[i].fwk_name;
6452 }
6453 }
6454
 6455     /* Not finding a matching framework type is not necessarily an error.
 6456      * This happens when mm-camera supports more attributes
 6457      * than the framework does */
6458 LOGH("Cannot find matching framework type");
6459 return NAME_NOT_FOUND;
6460}
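// Illustrative use (assuming a HAL<->framework map table such as the
// EFFECT_MODES_MAP-style tables declared elsewhere in this HAL):
//   int fwk_val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
//           hal_effect_mode);
//   if (fwk_val != NAME_NOT_FOUND) { /* use fwk_val */ }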
6461
6462/*===========================================================================
6463 * FUNCTION : lookupHalName
6464 *
 6465 * DESCRIPTION: In case the enum is not the same in the framework and backend,
 6466 * make sure the parameter is correctly propagated
6467 *
6468 * PARAMETERS :
6469 * @arr : map between the two enums
6470 * @len : len of the map
 6471 * @fwk_name : name of the framework parameter to map
6472 *
6473 * RETURN : int32_t type of status
6474 * hal_name -- success
6475 * none-zero failure code
 6476 * non-zero failure code
6477template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6478 size_t len, fwkType fwk_name)
6479{
6480 for (size_t i = 0; i < len; i++) {
6481 if (arr[i].fwk_name == fwk_name) {
6482 return arr[i].hal_name;
6483 }
6484 }
6485
6486 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6487 return NAME_NOT_FOUND;
6488}
6489
6490/*===========================================================================
6491 * FUNCTION : lookupProp
6492 *
6493 * DESCRIPTION: lookup a value by its name
6494 *
6495 * PARAMETERS :
6496 * @arr : map between the two enums
6497 * @len : size of the map
6498 * @name : name to be looked up
6499 *
6500 * RETURN : Value if found
6501 * CAM_CDS_MODE_MAX if not found
6502 *==========================================================================*/
6503template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6504 size_t len, const char *name)
6505{
6506 if (name) {
6507 for (size_t i = 0; i < len; i++) {
6508 if (!strcmp(arr[i].desc, name)) {
6509 return arr[i].val;
6510 }
6511 }
6512 }
6513 return CAM_CDS_MODE_MAX;
6514}
6515
6516/*===========================================================================
 6517 * FUNCTION : translateFromHalMetadata
 6518 * DESCRIPTION: Translate metadata from the HAL/backend format into the
 6519 *              framework's camera_metadata_t format
6520 * PARAMETERS :
6521 * @metadata : metadata information from callback
6522 * @timestamp: metadata buffer timestamp
6523 * @request_id: request id
6524 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006525 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006526 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6527 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006528 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006529 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6530 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006531 *
6532 * RETURN : camera_metadata_t*
6533 * metadata in a format specified by fwk
6534 *==========================================================================*/
6535camera_metadata_t*
6536QCamera3HardwareInterface::translateFromHalMetadata(
6537 metadata_buffer_t *metadata,
6538 nsecs_t timestamp,
6539 int32_t request_id,
6540 const CameraMetadata& jpegMetadata,
6541 uint8_t pipeline_depth,
6542 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006543 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006544 /* DevCamDebug metadata translateFromHalMetadata argument */
6545 uint8_t DevCamDebug_meta_enable,
6546 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006547 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006548 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006549 bool lastMetadataInBatch,
6550 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006551{
6552 CameraMetadata camMetadata;
6553 camera_metadata_t *resultMetadata;
6554
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006555 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006556 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6557 * Timestamp is needed because it's used for shutter notify calculation.
6558 * */
6559 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6560 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006561 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006562 }
6563
Thierry Strudel3d639192016-09-09 11:52:26 -07006564 if (jpegMetadata.entryCount())
6565 camMetadata.append(jpegMetadata);
6566
6567 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6568 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6569 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6570 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006571 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006572 if (mBatchSize == 0) {
6573 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6574 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6575 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006576
Samuel Ha68ba5172016-12-15 18:41:12 -08006577 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6578 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6579 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6580 // DevCamDebug metadata translateFromHalMetadata AF
6581 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6582 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6583 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6584 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6585 }
6586 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6587 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6588 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6589 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6590 }
6591 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6592 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6593 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6594 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6595 }
6596 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6597 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6598 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6599 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6600 }
6601 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6602 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6603 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6604 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6605 }
6606 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6607 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6608 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6609 *DevCamDebug_af_monitor_pdaf_target_pos;
6610 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6611 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6612 }
6613 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6614 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6615 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6616 *DevCamDebug_af_monitor_pdaf_confidence;
6617 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6618 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6619 }
6620 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6621 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6622 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6623 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6624 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6627 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6628 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6629 *DevCamDebug_af_monitor_tof_target_pos;
6630 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6631 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6634 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6635 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6636 *DevCamDebug_af_monitor_tof_confidence;
6637 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6638 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6641 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6642 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6643 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6644 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6647 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6648 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6649 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6650 &fwk_DevCamDebug_af_monitor_type_select, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6653 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6654 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6655 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6656 &fwk_DevCamDebug_af_monitor_refocus, 1);
6657 }
6658 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6659 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6660 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6661 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6662 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6663 }
6664 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6665 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6666 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6667 *DevCamDebug_af_search_pdaf_target_pos;
6668 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6669 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6672 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6673 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6675 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6678 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6679 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6680 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6681 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6682 }
6683 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6684 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6685 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6686 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6687 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6690 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6691 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6692 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6693 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6696 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6697 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6698 *DevCamDebug_af_search_tof_target_pos;
6699 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6700 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6701 }
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6703 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6704 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6705 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6706 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6707 }
6708 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6709 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6710 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6711 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6712 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6713 }
6714 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6715 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6716 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6717 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6718 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6719 }
6720 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6721 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6722 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6723 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6724 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6725 }
6726 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6727 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6728 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6729 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6730 &fwk_DevCamDebug_af_search_type_select, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6733 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6734 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6735 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6736 &fwk_DevCamDebug_af_search_next_pos, 1);
6737 }
6738 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6739 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6740 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6741 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6742 &fwk_DevCamDebug_af_search_target_pos, 1);
6743 }
6744 // DevCamDebug metadata translateFromHalMetadata AEC
6745 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6746 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6747 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6748 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6749 }
6750 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6751 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6752 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6753 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6756 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6757 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6758 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6759 }
6760 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6761 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6762 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6763 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6764 }
6765 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6766 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6767 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6768 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6769 }
6770 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6771 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6772 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6773 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6774 }
6775 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6776 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6777 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6778 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6779 }
6780 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6781 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6782 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6783 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6784 }
Samuel Ha34229982017-02-17 13:51:11 -08006785 // DevCamDebug metadata translateFromHalMetadata zzHDR
6786 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6787 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6788 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6789 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6792 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006793 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006794 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6795 }
6796 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6797 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6798 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6799 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6800 }
6801 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6802 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006803 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006804 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6805 }
6806 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6807 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6808 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6809 *DevCamDebug_aec_hdr_sensitivity_ratio;
6810 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6811 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6812 }
6813 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6814 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6815 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6816 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6817 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6818 }
6819 // DevCamDebug metadata translateFromHalMetadata ADRC
6820 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6821 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6822 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6823 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6824 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6825 }
6826 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6827 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6828 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6829 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6830 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6831 }
6832 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6833 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6834 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6835 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6836 }
6837 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6838 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6839 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6840 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6841 }
6842 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6843 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6844 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6845 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6846 }
6847 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6848 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6849 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6850 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6851 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006852 // DevCamDebug metadata translateFromHalMetadata AWB
6853 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6854 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6855 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6856 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6857 }
6858 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6859 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6860 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6861 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6862 }
6863 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6864 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6865 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6866 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6867 }
6868 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6869 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6870 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6871 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6872 }
6873 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6874 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6875 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6876 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6877 }
6878 }
6879 // atrace_end(ATRACE_TAG_ALWAYS);
6880
Thierry Strudel3d639192016-09-09 11:52:26 -07006881 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6882 int64_t fwk_frame_number = *frame_number;
6883 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6884 }
6885
6886 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6887 int32_t fps_range[2];
6888 fps_range[0] = (int32_t)float_range->min_fps;
6889 fps_range[1] = (int32_t)float_range->max_fps;
6890 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6891 fps_range, 2);
6892 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6893 fps_range[0], fps_range[1]);
6894 }
6895
6896 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6897 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6898 }
6899
6900 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6901 int val = lookupFwkName(SCENE_MODES_MAP,
6902 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6903 *sceneMode);
6904 if (NAME_NOT_FOUND != val) {
6905 uint8_t fwkSceneMode = (uint8_t)val;
6906 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6907 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6908 fwkSceneMode);
6909 }
6910 }
6911
6912 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6913 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6914 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6915 }
6916
6917 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6918 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6919 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6920 }
6921
6922 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6923 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6924 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6925 }
6926
6927 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6928 CAM_INTF_META_EDGE_MODE, metadata) {
6929 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6930 }
6931
6932 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6933 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6934 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6935 }
6936
6937 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6938 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6939 }
6940
6941 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
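// Negative HAL flash states are ignored; when the device has no flash unit the
// reported state is overridden to ANDROID_FLASH_STATE_UNAVAILABLE below.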
6942 if (0 <= *flashState) {
6943 uint8_t fwk_flashState = (uint8_t) *flashState;
6944 if (!gCamCapability[mCameraId]->flash_available) {
6945 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6946 }
6947 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6948 }
6949 }
6950
6951 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6952 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6953 if (NAME_NOT_FOUND != val) {
6954 uint8_t fwk_flashMode = (uint8_t)val;
6955 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6956 }
6957 }
6958
6959 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6960 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6961 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6962 }
6963
6964 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6965 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6966 }
6967
6968 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6969 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6970 }
6971
6972 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6973 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6974 }
6975
6976 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6977 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6978 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6979 }
6980
6981 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6982 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6983 LOGD("fwk_videoStab = %d", fwk_videoStab);
6984 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6985 } else {
6986 // Regardless of whether video stabilization is supported, CTS expects the EIS result
6987 // to be non-NULL, so the video stabilization result is hardcoded to OFF mode.
6988 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6989 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006990 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006991 }
6992
6993 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6994 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6995 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6996 }
6997
6998 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6999 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7000 }
7001
Thierry Strudel3d639192016-09-09 11:52:26 -07007002 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7003 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007004 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007005
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007006 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7007 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007008
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007009 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007010 blackLevelAppliedPattern->cam_black_level[0],
7011 blackLevelAppliedPattern->cam_black_level[1],
7012 blackLevelAppliedPattern->cam_black_level[2],
7013 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007014 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7015 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007016
7017#ifndef USE_HAL_3_3
7018 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307019 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007020 // depth space.
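// (Dividing a 14-bit value by 2^(14-10) = 16 yields its 10-bit equivalent.)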
Jason Lee4f3d96e2017-02-28 19:24:14 +05307021 fwk_blackLevelInd[0] /= 16.0;
7022 fwk_blackLevelInd[1] /= 16.0;
7023 fwk_blackLevelInd[2] /= 16.0;
7024 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007025 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7026 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007027#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007028 }
7029
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007030#ifndef USE_HAL_3_3
7031 // Fixed whitelevel is used by ISP/Sensor
7032 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7033 &gCamCapability[mCameraId]->white_level, 1);
7034#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007035
7036 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7037 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7038 int32_t scalerCropRegion[4];
7039 scalerCropRegion[0] = hScalerCropRegion->left;
7040 scalerCropRegion[1] = hScalerCropRegion->top;
7041 scalerCropRegion[2] = hScalerCropRegion->width;
7042 scalerCropRegion[3] = hScalerCropRegion->height;
7043
7044 // Adjust crop region from sensor output coordinate system to active
7045 // array coordinate system.
7046 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7047 scalerCropRegion[2], scalerCropRegion[3]);
7048
7049 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7050 }
7051
7052 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7053 LOGD("sensorExpTime = %lld", *sensorExpTime);
7054 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7055 }
7056
7057 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7058 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7059 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7060 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7061 }
7062
7063 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7064 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7065 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7066 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7067 sensorRollingShutterSkew, 1);
7068 }
7069
7070 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7071 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7072 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7073
7074 //calculate the noise profile based on sensitivity
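// ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) coefficient pair per color channel,
// where (per the framework's noise profile definition) pixel noise variance is modeled
// as approximately S * signal + O; the same sensitivity-derived pair is replicated for
// every channel here.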
7075 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7076 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7077 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7078 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7079 noise_profile[i] = noise_profile_S;
7080 noise_profile[i+1] = noise_profile_O;
7081 }
7082 LOGD("noise model entry (S, O) is (%f, %f)",
7083 noise_profile_S, noise_profile_O);
7084 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7085 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7086 }
7087
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007088#ifndef USE_HAL_3_3
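// ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST: start from the ISP digital sensitivity
// (default 100) and fold in any post-stats sensitivity factor reported by the HAL.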
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007089 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007090 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007091 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007092 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007093 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7094 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7095 }
7096 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007097#endif
7098
Thierry Strudel3d639192016-09-09 11:52:26 -07007099 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7100 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7101 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7102 }
7103
7104 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7105 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7106 *faceDetectMode);
7107 if (NAME_NOT_FOUND != val) {
7108 uint8_t fwk_faceDetectMode = (uint8_t)val;
7109 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7110
7111 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7112 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7113 CAM_INTF_META_FACE_DETECTION, metadata) {
7114 uint8_t numFaces = MIN(
7115 faceDetectionInfo->num_faces_detected, MAX_ROI);
7116 int32_t faceIds[MAX_ROI];
7117 uint8_t faceScores[MAX_ROI];
7118 int32_t faceRectangles[MAX_ROI * 4];
7119 int32_t faceLandmarks[MAX_ROI * 6];
7120 size_t j = 0, k = 0;
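// j indexes the flattened face-rectangle array (4 ints per face), k the flattened
// landmark array (6 ints per face: left eye, right eye and mouth coordinates).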
7121
7122 for (size_t i = 0; i < numFaces; i++) {
7123 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7124 // Map the face boundary from the sensor output coordinate system to the
7125 // active array coordinate system.
7126 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7127 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7128 rect.width, rect.height);
7129
7130 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7131 faceRectangles+j, -1);
7132
Jason Lee8ce36fa2017-04-19 19:40:37 -07007133 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7134 "bottom-right (%d, %d)",
7135 faceDetectionInfo->frame_id, i,
7136 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7137 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7138
Thierry Strudel3d639192016-09-09 11:52:26 -07007139 j+= 4;
7140 }
7141 if (numFaces <= 0) {
7142 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7143 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7144 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7145 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7146 }
7147
7148 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7149 numFaces);
7150 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7151 faceRectangles, numFaces * 4U);
7152 if (fwk_faceDetectMode ==
7153 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7154 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7155 CAM_INTF_META_FACE_LANDMARK, metadata) {
7156
7157 for (size_t i = 0; i < numFaces; i++) {
7158 // Map the landmark coordinates from the sensor output coordinate system
7159 // to the active array coordinate system.
7160 mCropRegionMapper.toActiveArray(
7161 landmarks->face_landmarks[i].left_eye_center.x,
7162 landmarks->face_landmarks[i].left_eye_center.y);
7163 mCropRegionMapper.toActiveArray(
7164 landmarks->face_landmarks[i].right_eye_center.x,
7165 landmarks->face_landmarks[i].right_eye_center.y);
7166 mCropRegionMapper.toActiveArray(
7167 landmarks->face_landmarks[i].mouth_center.x,
7168 landmarks->face_landmarks[i].mouth_center.y);
7169
7170 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007171
7172 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7173 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7174 faceDetectionInfo->frame_id, i,
7175 faceLandmarks[k + LEFT_EYE_X],
7176 faceLandmarks[k + LEFT_EYE_Y],
7177 faceLandmarks[k + RIGHT_EYE_X],
7178 faceLandmarks[k + RIGHT_EYE_Y],
7179 faceLandmarks[k + MOUTH_X],
7180 faceLandmarks[k + MOUTH_Y]);
7181
Thierry Strudel04e026f2016-10-10 11:27:36 -07007182 k+= TOTAL_LANDMARK_INDICES;
7183 }
7184 } else {
7185 for (size_t i = 0; i < numFaces; i++) {
7186 setInvalidLandmarks(faceLandmarks+k);
7187 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007188 }
7189 }
7190
Jason Lee49619db2017-04-13 12:07:22 -07007191 for (size_t i = 0; i < numFaces; i++) {
7192 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7193
7194 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7195 faceDetectionInfo->frame_id, i, faceIds[i]);
7196 }
7197
Thierry Strudel3d639192016-09-09 11:52:26 -07007198 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7199 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7200 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007201 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007202 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7203 CAM_INTF_META_FACE_BLINK, metadata) {
7204 uint8_t detected[MAX_ROI];
7205 uint8_t degree[MAX_ROI * 2];
7206 for (size_t i = 0; i < numFaces; i++) {
7207 detected[i] = blinks->blink[i].blink_detected;
7208 degree[2 * i] = blinks->blink[i].left_blink;
7209 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007210
Jason Lee49619db2017-04-13 12:07:22 -07007211 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7212 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7213 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7214 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007215 }
7216 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7217 detected, numFaces);
7218 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7219 degree, numFaces * 2);
7220 }
7221 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7222 CAM_INTF_META_FACE_SMILE, metadata) {
7223 uint8_t degree[MAX_ROI];
7224 uint8_t confidence[MAX_ROI];
7225 for (size_t i = 0; i < numFaces; i++) {
7226 degree[i] = smiles->smile[i].smile_degree;
7227 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007228
Jason Lee49619db2017-04-13 12:07:22 -07007229 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7230 "smile_degree=%d, smile_score=%d",
7231 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007232 }
7233 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7234 degree, numFaces);
7235 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7236 confidence, numFaces);
7237 }
7238 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7239 CAM_INTF_META_FACE_GAZE, metadata) {
7240 int8_t angle[MAX_ROI];
7241 int32_t direction[MAX_ROI * 3];
7242 int8_t degree[MAX_ROI * 2];
7243 for (size_t i = 0; i < numFaces; i++) {
7244 angle[i] = gazes->gaze[i].gaze_angle;
7245 direction[3 * i] = gazes->gaze[i].updown_dir;
7246 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7247 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7248 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7249 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007250
7251 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7252 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7253 "left_right_gaze=%d, top_bottom_gaze=%d",
7254 faceDetectionInfo->frame_id, i, angle[i],
7255 direction[3 * i], direction[3 * i + 1],
7256 direction[3 * i + 2],
7257 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007258 }
7259 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7260 (uint8_t *)angle, numFaces);
7261 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7262 direction, numFaces * 3);
7263 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7264 (uint8_t *)degree, numFaces * 2);
7265 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007266 }
7267 }
7268 }
7269 }
7270
7271 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7272 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007273 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007274 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007275 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007276
Shuzhen Wang14415f52016-11-16 18:26:18 -08007277 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7278 histogramBins = *histBins;
7279 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7280 }
7281
7282 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007283 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7284 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007285 int32_t* histogramData = NULL;
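// Pick a single channel's histogram buffer to report: for bayer stats the requested
// channel's buffer is used, with the R channel as the fallback (also covering the
// Y and ALL cases); for YUV stats the yuv_stats buffer is used.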
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007286
7287 switch (stats_data->type) {
7288 case CAM_HISTOGRAM_TYPE_BAYER:
7289 switch (stats_data->bayer_stats.data_type) {
7290 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007291 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7292 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007293 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007294 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7295 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007296 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007297 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7298 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007299 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007300 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007301 case CAM_STATS_CHANNEL_R:
7302 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007303 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7304 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007305 }
7306 break;
7307 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007308 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007309 break;
7310 }
7311
Shuzhen Wang14415f52016-11-16 18:26:18 -08007312 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007313 }
7314 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007315 }
7316
7317 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7318 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7319 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7320 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7321 }
7322
7323 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7324 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7325 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7326 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7327 }
7328
7329 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7330 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7331 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7332 CAM_MAX_SHADING_MAP_HEIGHT);
7333 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7334 CAM_MAX_SHADING_MAP_WIDTH);
7335 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7336 lensShadingMap->lens_shading, 4U * map_width * map_height);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7340 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7341 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7342 }
7343
7344 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7345 // Translate CAM_INTF_META_TONEMAP_CURVES into the framework tonemap curve tags
7346 /* ch0 = G, ch 1 = B, ch 2 = R*/
7347 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7348 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7349 tonemap->tonemap_points_cnt,
7350 CAM_MAX_TONEMAP_CURVE_SIZE);
7351 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7352 }
7353
7354 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7355 &tonemap->curves[0].tonemap_points[0][0],
7356 tonemap->tonemap_points_cnt * 2);
7357
7358 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7359 &tonemap->curves[1].tonemap_points[0][0],
7360 tonemap->tonemap_points_cnt * 2);
7361
7362 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7363 &tonemap->curves[2].tonemap_points[0][0],
7364 tonemap->tonemap_points_cnt * 2);
7365 }
7366
7367 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7368 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7369 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7370 CC_GAIN_MAX);
7371 }
7372
7373 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7374 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7375 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7376 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7377 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7378 }
7379
7380 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7381 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7382 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7383 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7384 toneCurve->tonemap_points_cnt,
7385 CAM_MAX_TONEMAP_CURVE_SIZE);
7386 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7387 }
7388 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7389 (float*)toneCurve->curve.tonemap_points,
7390 toneCurve->tonemap_points_cnt * 2);
7391 }
7392
7393 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7394 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7395 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7396 predColorCorrectionGains->gains, 4);
7397 }
7398
7399 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7400 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7401 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7402 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7403 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7404 }
7405
7406 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7407 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7408 }
7409
7410 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7411 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7412 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7413 }
7414
7415 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7416 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7417 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7418 }
7419
7420 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7421 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7422 *effectMode);
7423 if (NAME_NOT_FOUND != val) {
7424 uint8_t fwk_effectMode = (uint8_t)val;
7425 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7426 }
7427 }
7428
7429 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7430 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7431 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7432 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7433 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7434 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7435 }
7436 int32_t fwk_testPatternData[4];
7437 fwk_testPatternData[0] = testPatternData->r;
7438 fwk_testPatternData[3] = testPatternData->b;
7439 switch (gCamCapability[mCameraId]->color_arrangement) {
7440 case CAM_FILTER_ARRANGEMENT_RGGB:
7441 case CAM_FILTER_ARRANGEMENT_GRBG:
7442 fwk_testPatternData[1] = testPatternData->gr;
7443 fwk_testPatternData[2] = testPatternData->gb;
7444 break;
7445 case CAM_FILTER_ARRANGEMENT_GBRG:
7446 case CAM_FILTER_ARRANGEMENT_BGGR:
7447 fwk_testPatternData[2] = testPatternData->gr;
7448 fwk_testPatternData[1] = testPatternData->gb;
7449 break;
7450 default:
7451 LOGE("color arrangement %d is not supported",
7452 gCamCapability[mCameraId]->color_arrangement);
7453 break;
7454 }
7455 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7456 }
7457
7458 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7459 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7460 }
7461
7462 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7463 String8 str((const char *)gps_methods);
7464 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7465 }
7466
7467 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7468 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7469 }
7470
7471 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7472 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7473 }
7474
7475 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7476 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7477 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7478 }
7479
7480 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7481 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7482 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7483 }
7484
7485 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7486 int32_t fwk_thumb_size[2];
7487 fwk_thumb_size[0] = thumb_size->width;
7488 fwk_thumb_size[1] = thumb_size->height;
7489 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7490 }
7491
7492 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7493 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7494 privateData,
7495 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7496 }
7497
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007498 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007499 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007500 meteringMode, 1);
7501 }
7502
Thierry Strudel54dc9782017-02-15 12:12:10 -08007503 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7504 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7505 LOGD("hdr_scene_data: %d %f\n",
7506 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7507 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7508 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7509 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7510 &isHdr, 1);
7511 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7512 &isHdrConfidence, 1);
7513 }
7514
7515
7516
Thierry Strudel3d639192016-09-09 11:52:26 -07007517 if (metadata->is_tuning_params_valid) {
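// Tuning blob layout packed below: the tuning data version, then the sensor, VFE,
// CPP, CAC and mod3 section sizes (one uint32_t each), followed by the variable-length
// sensor, VFE, CPP and CAC payloads copied out of tuning_params.data.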
7518 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7519 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7520 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7521
7522
7523 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7524 sizeof(uint32_t));
7525 data += sizeof(uint32_t);
7526
7527 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7528 sizeof(uint32_t));
7529 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7530 data += sizeof(uint32_t);
7531
7532 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7533 sizeof(uint32_t));
7534 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7535 data += sizeof(uint32_t);
7536
7537 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7538 sizeof(uint32_t));
7539 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7540 data += sizeof(uint32_t);
7541
7542 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7543 sizeof(uint32_t));
7544 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7545 data += sizeof(uint32_t);
7546
7547 metadata->tuning_params.tuning_mod3_data_size = 0;
7548 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7549 sizeof(uint32_t));
7550 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7551 data += sizeof(uint32_t);
7552
7553 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7554 TUNING_SENSOR_DATA_MAX);
7555 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7556 count);
7557 data += count;
7558
7559 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7560 TUNING_VFE_DATA_MAX);
7561 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7562 count);
7563 data += count;
7564
7565 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7566 TUNING_CPP_DATA_MAX);
7567 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7568 count);
7569 data += count;
7570
7571 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7572 TUNING_CAC_DATA_MAX);
7573 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7574 count);
7575 data += count;
7576
7577 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7578 (int32_t *)(void *)tuning_meta_data_blob,
7579 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7580 }
7581
7582 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7583 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7584 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7585 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7586 NEUTRAL_COL_POINTS);
7587 }
7588
7589 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7590 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7591 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7592 }
7593
7594 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7595 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7596 // Adjust the AE region from the sensor output coordinate system to the
7597 // active array coordinate system.
7598 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7599 hAeRegions->rect.width, hAeRegions->rect.height);
7600
7601 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7602 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7603 REGIONS_TUPLE_COUNT);
7604 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7605 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7606 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7607 hAeRegions->rect.height);
7608 }
7609
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007610 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7611 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7612 if (NAME_NOT_FOUND != val) {
7613 uint8_t fwkAfMode = (uint8_t)val;
7614 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7615 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7616 } else {
7617 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7618 val);
7619 }
7620 }
7621
Thierry Strudel3d639192016-09-09 11:52:26 -07007622 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7623 uint8_t fwk_afState = (uint8_t) *afState;
7624 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007625 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007626 }
7627
7628 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7629 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7630 }
7631
7632 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7633 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7634 }
7635
7636 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7637 uint8_t fwk_lensState = *lensState;
7638 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7639 }
7640
Thierry Strudel3d639192016-09-09 11:52:26 -07007641
7642 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007643 uint32_t ab_mode = *hal_ab_mode;
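// The framework defines only a single AUTO antibanding value, so the HAL's
// 50Hz/60Hz-specific auto modes are collapsed to plain AUTO before the lookup.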
7644 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7645 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7646 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7647 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007648 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007649 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007650 if (NAME_NOT_FOUND != val) {
7651 uint8_t fwk_ab_mode = (uint8_t)val;
7652 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7653 }
7654 }
7655
7656 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7657 int val = lookupFwkName(SCENE_MODES_MAP,
7658 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7659 if (NAME_NOT_FOUND != val) {
7660 uint8_t fwkBestshotMode = (uint8_t)val;
7661 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7662 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7663 } else {
7664 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7665 }
7666 }
7667
7668 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7669 uint8_t fwk_mode = (uint8_t) *mode;
7670 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7671 }
7672
7673 /* Constant metadata values to be updated */
7674 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7675 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7676
7677 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7678 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7679
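// No hot pixel map entries are available; publish ANDROID_STATISTICS_HOT_PIXEL_MAP
// with a count of zero.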
7680 int32_t hotPixelMap[2];
7681 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7682
7683 // CDS
7684 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7685 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7686 }
7687
Thierry Strudel04e026f2016-10-10 11:27:36 -07007688 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7689 int32_t fwk_hdr;
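// mCurrFeatureState tracks the staggered video HDR feature bit so that HDR on/off
// transitions are logged (PROFILE_META_HDR_TOGGLED) only when the state actually changes.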
Thierry Strudel54dc9782017-02-15 12:12:10 -08007690 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007691 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7692 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7693 } else {
7694 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7695 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007696
7697 if(fwk_hdr != curr_hdr_state) {
7698 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7699 if(fwk_hdr)
7700 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7701 else
7702 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7703 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007704 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7705 }
7706
Thierry Strudel54dc9782017-02-15 12:12:10 -08007707 //binning correction
7708 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7709 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7710 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7711 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7712 }
7713
Thierry Strudel04e026f2016-10-10 11:27:36 -07007714 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007715 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007716 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7717 int8_t is_ir_on = 0;
7718
7719 is_ir_on = (fwk_ir > 0) ? 1 : 0;
7720 if(is_ir_on != curr_ir_state) {
7721 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7722 if(is_ir_on)
7723 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7724 else
7725 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7726 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007727 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007728 }
7729
Thierry Strudel269c81a2016-10-12 12:13:59 -07007730 // AEC SPEED
7731 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7732 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7733 }
7734
7735 // AWB SPEED
7736 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7737 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7738 }
7739
Thierry Strudel3d639192016-09-09 11:52:26 -07007740 // TNR
7741 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7742 uint8_t tnr_enable = tnr->denoise_enable;
7743 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007744 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7745 int8_t is_tnr_on = 0;
7746
7747 is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7748 if(is_tnr_on != curr_tnr_state) {
7749 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7750 if(is_tnr_on)
7751 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7752 else
7753 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7754 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007755
7756 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7757 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7758 }
7759
7760 // Reprocess crop data
7761 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7762 uint8_t cnt = crop_data->num_of_streams;
7763 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7764 // mm-qcamera-daemon only posts crop_data for streams
7765 // not linked to pproc, so the absence of valid crop metadata is not
7766 // necessarily an error case.
7767 LOGD("No valid crop metadata entries");
7768 } else {
7769 uint32_t reproc_stream_id;
7770 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7771 LOGD("No reprocessible stream found, ignore crop data");
7772 } else {
7773 int rc = NO_ERROR;
7774 Vector<int32_t> roi_map;
7775 int32_t *crop = new int32_t[cnt*4];
7776 if (NULL == crop) {
7777 rc = NO_MEMORY;
7778 }
7779 if (NO_ERROR == rc) {
7780 int32_t streams_found = 0;
7781 for (size_t i = 0; i < cnt; i++) {
7782 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7783 if (pprocDone) {
7784 // HAL already does internal reprocessing,
7785 // either via reprocessing before JPEG encoding,
7786 // or offline postprocessing for pproc bypass case.
7787 crop[0] = 0;
7788 crop[1] = 0;
7789 crop[2] = mInputStreamInfo.dim.width;
7790 crop[3] = mInputStreamInfo.dim.height;
7791 } else {
7792 crop[0] = crop_data->crop_info[i].crop.left;
7793 crop[1] = crop_data->crop_info[i].crop.top;
7794 crop[2] = crop_data->crop_info[i].crop.width;
7795 crop[3] = crop_data->crop_info[i].crop.height;
7796 }
7797 roi_map.add(crop_data->crop_info[i].roi_map.left);
7798 roi_map.add(crop_data->crop_info[i].roi_map.top);
7799 roi_map.add(crop_data->crop_info[i].roi_map.width);
7800 roi_map.add(crop_data->crop_info[i].roi_map.height);
7801 streams_found++;
7802 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7803 crop[0], crop[1], crop[2], crop[3]);
7804 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7805 crop_data->crop_info[i].roi_map.left,
7806 crop_data->crop_info[i].roi_map.top,
7807 crop_data->crop_info[i].roi_map.width,
7808 crop_data->crop_info[i].roi_map.height);
7809 break;
7810
7811 }
7812 }
7813 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7814 &streams_found, 1);
7815 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7816 crop, (size_t)(streams_found * 4));
7817 if (roi_map.array()) {
7818 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7819 roi_map.array(), roi_map.size());
7820 }
7821 }
7822 if (crop) {
7823 delete [] crop;
7824 }
7825 }
7826 }
7827 }
7828
7829 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7830 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7831 // so the CAC result is hardcoded to OFF mode.
7832 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7833 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7834 } else {
7835 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7836 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7837 *cacMode);
7838 if (NAME_NOT_FOUND != val) {
7839 uint8_t resultCacMode = (uint8_t)val;
7840 // check whether CAC result from CB is equal to Framework set CAC mode
7841 // If not equal then set the CAC mode came in corresponding request
7842 if (fwk_cacMode != resultCacMode) {
7843 resultCacMode = fwk_cacMode;
7844 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007845 //Check if CAC is disabled by property
7846 if (m_cacModeDisabled) {
7847 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7848 }
7849
Thierry Strudel3d639192016-09-09 11:52:26 -07007850 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7851 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7852 } else {
7853 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7854 }
7855 }
7856 }
7857
7858 // Post blob of cam_cds_data through vendor tag.
7859 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7860 uint8_t cnt = cdsInfo->num_of_streams;
7861 cam_cds_data_t cdsDataOverride;
7862 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7863 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7864 cdsDataOverride.num_of_streams = 1;
7865 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7866 uint32_t reproc_stream_id;
7867 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7868 LOGD("No reprocessible stream found, ignore cds data");
7869 } else {
7870 for (size_t i = 0; i < cnt; i++) {
7871 if (cdsInfo->cds_info[i].stream_id ==
7872 reproc_stream_id) {
7873 cdsDataOverride.cds_info[0].cds_enable =
7874 cdsInfo->cds_info[i].cds_enable;
7875 break;
7876 }
7877 }
7878 }
7879 } else {
7880 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7881 }
7882 camMetadata.update(QCAMERA3_CDS_INFO,
7883 (uint8_t *)&cdsDataOverride,
7884 sizeof(cam_cds_data_t));
7885 }
7886
7887 // Ldaf calibration data
7888 if (!mLdafCalibExist) {
7889 IF_META_AVAILABLE(uint32_t, ldafCalib,
7890 CAM_INTF_META_LDAF_EXIF, metadata) {
7891 mLdafCalibExist = true;
7892 mLdafCalib[0] = ldafCalib[0];
7893 mLdafCalib[1] = ldafCalib[1];
7894 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7895 ldafCalib[0], ldafCalib[1]);
7896 }
7897 }
7898
Thierry Strudel54dc9782017-02-15 12:12:10 -08007899 // EXIF debug data through vendor tag
7900 /*
7901     * The Mobicat mask can assume 3 values:
7902     * 1 refers to Mobicat data,
7903     * 2 refers to Stats Debug and Exif Debug data,
7904     * 3 refers to Mobicat and Stats Debug data.
7905     * We want to make sure that Exif debug data is sent
7906     * only when the Mobicat mask is 2.
7907 */
7908 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7909 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7910 (uint8_t *)(void *)mExifParams.debug_params,
7911 sizeof(mm_jpeg_debug_exif_params_t));
7912 }
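    /*
     * Illustrative sketch (not part of the HAL): the gating above reduces to a
     * simple predicate on the Mobicat mask. The helper name below is hypothetical.
     *
     *   static bool shouldSendExifDebugBlob(uint32_t mobicatMask, bool haveDebugParams) {
     *       // Only mask value 2 ("Stats Debug and Exif Debug data") carries the blob.
     *       return haveDebugParams && (mobicatMask == 2);
     *   }
     */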
7913
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007914 // Reprocess and DDM debug data through vendor tag
7915 cam_reprocess_info_t repro_info;
7916 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007917 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7918 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007919 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007920 }
7921 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7922 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007923 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007924 }
7925 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7926 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007927 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007928 }
7929 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7930 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007931 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007932 }
7933 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7934 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007935 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007936 }
7937 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007938 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007939 }
7940 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7941 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007942 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007943 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007944 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7945 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7946 }
7947 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7948 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7949 }
7950 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7951 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007952
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007953 // INSTANT AEC MODE
7954 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7955 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7956 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7957 }
7958
Shuzhen Wange763e802016-03-31 10:24:29 -07007959 // AF scene change
7960 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7961 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7962 }
7963
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007964 // Enable ZSL
7965 if (enableZsl != nullptr) {
7966 uint8_t value = *enableZsl ?
7967 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7968 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7969 }
7970
Thierry Strudel3d639192016-09-09 11:52:26 -07007971 resultMetadata = camMetadata.release();
7972 return resultMetadata;
7973}
7974
7975/*===========================================================================
7976 * FUNCTION : saveExifParams
7977 *
7978 * DESCRIPTION: Cache the 3A/stats EXIF debug parameters from the metadata callback in mExifParams.
7979 *
7980 * PARAMETERS :
7981 * @metadata : metadata information from callback
7982 *
7983 * RETURN : none
7984 *
7985 *==========================================================================*/
7986void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7987{
7988 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7989 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7990 if (mExifParams.debug_params) {
7991 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7992 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7993 }
7994 }
7995 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7996 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7997 if (mExifParams.debug_params) {
7998 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7999 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8000 }
8001 }
8002 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8003 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8004 if (mExifParams.debug_params) {
8005 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8006 mExifParams.debug_params->af_debug_params_valid = TRUE;
8007 }
8008 }
8009 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8010 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8011 if (mExifParams.debug_params) {
8012 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8013 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8014 }
8015 }
8016 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8017 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8018 if (mExifParams.debug_params) {
8019 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8020 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8021 }
8022 }
8023 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8024 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8025 if (mExifParams.debug_params) {
8026 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8027 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8028 }
8029 }
8030 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8031 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8032 if (mExifParams.debug_params) {
8033 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8034 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8035 }
8036 }
8037 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8038 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8039 if (mExifParams.debug_params) {
8040 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8041 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8042 }
8043 }
8044}
8045
8046/*===========================================================================
8047 * FUNCTION : get3AExifParams
8048 *
8049 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams).
8050 *
8051 * PARAMETERS : none
8052 *
8053 *
8054 * RETURN : mm_jpeg_exif_params_t
8055 *
8056 *==========================================================================*/
8057mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8058{
8059 return mExifParams;
8060}
8061
8062/*===========================================================================
8063 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8064 *
8065 * DESCRIPTION: Translate urgent (partial result) metadata from the backend into framework result metadata.
8066 *
8067 * PARAMETERS :
8068 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008069 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8070 * urgent metadata in a batch. Always true for
8071 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008072 *
8073 * RETURN : camera_metadata_t*
8074 * metadata in a format specified by fwk
8075 *==========================================================================*/
8076camera_metadata_t*
8077QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008078 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008079{
8080 CameraMetadata camMetadata;
8081 camera_metadata_t *resultMetadata;
8082
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008083 if (!lastUrgentMetadataInBatch) {
8084 /* In batch mode, use empty metadata if this is not the last in batch
8085 */
8086 resultMetadata = allocate_camera_metadata(0, 0);
8087 return resultMetadata;
8088 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008089
8090 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8091 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8092 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8093 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8094 }
8095
8096 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8097 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8098 &aecTrigger->trigger, 1);
8099 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8100 &aecTrigger->trigger_id, 1);
8101 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8102 aecTrigger->trigger);
8103 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8104 aecTrigger->trigger_id);
8105 }
8106
8107 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8108 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8109 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8110 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8111 }
8112
Thierry Strudel3d639192016-09-09 11:52:26 -07008113 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8114 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8115 &af_trigger->trigger, 1);
8116 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8117 af_trigger->trigger);
8118 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8119 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8120 af_trigger->trigger_id);
8121 }
8122
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008123 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8124 /*af regions*/
8125 int32_t afRegions[REGIONS_TUPLE_COUNT];
8126        // Adjust the AF region from the sensor output coordinate system to the
8127        // active array coordinate system.
8128 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8129 hAfRegions->rect.width, hAfRegions->rect.height);
8130
8131 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8132 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8133 REGIONS_TUPLE_COUNT);
8134 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8135 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8136 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8137 hAfRegions->rect.height);
8138 }
8139
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008140 // AF region confidence
8141 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8142 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8143 }
8144
Thierry Strudel3d639192016-09-09 11:52:26 -07008145 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8146 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8147 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8148 if (NAME_NOT_FOUND != val) {
8149 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8150 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8151 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8152 } else {
8153 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8154 }
8155 }
8156
8157 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8158 uint32_t aeMode = CAM_AE_MODE_MAX;
8159 int32_t flashMode = CAM_FLASH_MODE_MAX;
8160 int32_t redeye = -1;
8161 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8162 aeMode = *pAeMode;
8163 }
8164 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8165 flashMode = *pFlashMode;
8166 }
8167 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8168 redeye = *pRedeye;
8169 }
8170
8171 if (1 == redeye) {
8172 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8173 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8174 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8175 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8176 flashMode);
8177 if (NAME_NOT_FOUND != val) {
8178 fwk_aeMode = (uint8_t)val;
8179 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8180 } else {
8181 LOGE("Unsupported flash mode %d", flashMode);
8182 }
8183 } else if (aeMode == CAM_AE_MODE_ON) {
8184 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8185 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8186 } else if (aeMode == CAM_AE_MODE_OFF) {
8187 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8188 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008189 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8190 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8191 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008192 } else {
8193 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8194 "flashMode:%d, aeMode:%u!!!",
8195 redeye, flashMode, aeMode);
8196 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008197 if (mInstantAEC) {
8198        // Increment the frame index count until a bound is reached for instant AEC.
8199 mInstantAecFrameIdxCount++;
8200 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8201 CAM_INTF_META_AEC_INFO, metadata) {
8202 LOGH("ae_params->settled = %d",ae_params->settled);
8203            // If AEC has settled, or the number of frames has reached the bound value,
8204            // reset instant AEC.
8205 if (ae_params->settled ||
8206 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8207 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8208 mInstantAEC = false;
8209 mResetInstantAEC = true;
8210 mInstantAecFrameIdxCount = 0;
8211 }
8212 }
8213 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008214 resultMetadata = camMetadata.release();
8215 return resultMetadata;
8216}
8217
8218/*===========================================================================
8219 * FUNCTION : dumpMetadataToFile
8220 *
8221 * DESCRIPTION: Dumps tuning metadata to file system
8222 *
8223 * PARAMETERS :
8224 * @meta : tuning metadata
8225 * @dumpFrameCount : current dump frame count
8226 * @enabled : Enable mask
8227 *
8228 *==========================================================================*/
8229void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8230 uint32_t &dumpFrameCount,
8231 bool enabled,
8232 const char *type,
8233 uint32_t frameNumber)
8234{
8235 //Some sanity checks
8236 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8237 LOGE("Tuning sensor data size bigger than expected %d: %d",
8238 meta.tuning_sensor_data_size,
8239 TUNING_SENSOR_DATA_MAX);
8240 return;
8241 }
8242
8243 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8244 LOGE("Tuning VFE data size bigger than expected %d: %d",
8245 meta.tuning_vfe_data_size,
8246 TUNING_VFE_DATA_MAX);
8247 return;
8248 }
8249
8250 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8251 LOGE("Tuning CPP data size bigger than expected %d: %d",
8252 meta.tuning_cpp_data_size,
8253 TUNING_CPP_DATA_MAX);
8254 return;
8255 }
8256
8257 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8258 LOGE("Tuning CAC data size bigger than expected %d: %d",
8259 meta.tuning_cac_data_size,
8260 TUNING_CAC_DATA_MAX);
8261 return;
8262 }
8263 //
8264
8265 if(enabled){
8266 char timeBuf[FILENAME_MAX];
8267 char buf[FILENAME_MAX];
8268 memset(buf, 0, sizeof(buf));
8269 memset(timeBuf, 0, sizeof(timeBuf));
8270 time_t current_time;
8271 struct tm * timeinfo;
8272 time (&current_time);
8273 timeinfo = localtime (&current_time);
8274 if (timeinfo != NULL) {
8275 strftime (timeBuf, sizeof(timeBuf),
8276 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8277 }
8278 String8 filePath(timeBuf);
8279 snprintf(buf,
8280 sizeof(buf),
8281 "%dm_%s_%d.bin",
8282 dumpFrameCount,
8283 type,
8284 frameNumber);
8285 filePath.append(buf);
8286 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8287 if (file_fd >= 0) {
8288 ssize_t written_len = 0;
8289 meta.tuning_data_version = TUNING_DATA_VERSION;
8290 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8291 written_len += write(file_fd, data, sizeof(uint32_t));
8292 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8293 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8294 written_len += write(file_fd, data, sizeof(uint32_t));
8295 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8296 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8297 written_len += write(file_fd, data, sizeof(uint32_t));
8298 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8299 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8300 written_len += write(file_fd, data, sizeof(uint32_t));
8301 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8302 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8303 written_len += write(file_fd, data, sizeof(uint32_t));
8304 meta.tuning_mod3_data_size = 0;
8305 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8306 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8307 written_len += write(file_fd, data, sizeof(uint32_t));
8308 size_t total_size = meta.tuning_sensor_data_size;
8309 data = (void *)((uint8_t *)&meta.data);
8310 written_len += write(file_fd, data, total_size);
8311 total_size = meta.tuning_vfe_data_size;
8312 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8313 written_len += write(file_fd, data, total_size);
8314 total_size = meta.tuning_cpp_data_size;
8315 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8316 written_len += write(file_fd, data, total_size);
8317 total_size = meta.tuning_cac_data_size;
8318 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8319 written_len += write(file_fd, data, total_size);
8320 close(file_fd);
8321        } else {
8322            LOGE("failed to open file for metadata dumping");
8323 }
8324 }
8325}
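
/*
 * Illustrative sketch (not part of the HAL): the dump written above can be parsed
 * offline by mirroring the write order. The struct below is a hypothetical reader-side
 * view of the on-disk layout, derived from the write() sequence in this function.
 *
 *   struct TuningDumpHeader {
 *       uint32_t version;     // TUNING_DATA_VERSION
 *       uint32_t sensorSize;  // tuning_sensor_data_size
 *       uint32_t vfeSize;     // tuning_vfe_data_size
 *       uint32_t cppSize;     // tuning_cpp_data_size
 *       uint32_t cacSize;     // tuning_cac_data_size
 *       uint32_t mod3Size;    // always written as 0
 *   };
 *   // The header is followed by the sensor, VFE, CPP and CAC payloads, in that
 *   // order, each with the length recorded in the header.
 */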
8326
8327/*===========================================================================
8328 * FUNCTION : cleanAndSortStreamInfo
8329 *
8330 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8331 * and sort them such that raw stream is at the end of the list
8332 * This is a workaround for camera daemon constraint.
8333 *              This is a workaround for a camera daemon constraint.
8334 * PARAMETERS : None
8335 *
8336 *==========================================================================*/
8337void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8338{
8339 List<stream_info_t *> newStreamInfo;
8340
8341 /*clean up invalid streams*/
8342 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8343 it != mStreamInfo.end();) {
8344 if(((*it)->status) == INVALID){
8345 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8346 delete channel;
8347 free(*it);
8348 it = mStreamInfo.erase(it);
8349 } else {
8350 it++;
8351 }
8352 }
8353
8354 // Move preview/video/callback/snapshot streams into newList
8355 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8356 it != mStreamInfo.end();) {
8357 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8358 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8359 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8360 newStreamInfo.push_back(*it);
8361 it = mStreamInfo.erase(it);
8362 } else
8363 it++;
8364 }
8365 // Move raw streams into newList
8366 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8367 it != mStreamInfo.end();) {
8368 newStreamInfo.push_back(*it);
8369 it = mStreamInfo.erase(it);
8370 }
8371
8372 mStreamInfo = newStreamInfo;
8373}
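
/*
 * Illustrative sketch (not part of the HAL), with an assumed configuration: a stream
 * list of { RAW16, IMPLEMENTATION_DEFINED (preview), BLOB (snapshot) } is reordered
 * above to { IMPLEMENTATION_DEFINED, BLOB, RAW16 }; non-raw streams keep their
 * relative order and every raw stream moves to the tail of mStreamInfo.
 */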
8374
8375/*===========================================================================
8376 * FUNCTION : extractJpegMetadata
8377 *
8378 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8379 *              JPEG metadata is cached in the HAL, and returned as part of the capture
8380 *              result when metadata is returned from the camera daemon.
8381 *
8382 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8383 * @request: capture request
8384 *
8385 *==========================================================================*/
8386void QCamera3HardwareInterface::extractJpegMetadata(
8387 CameraMetadata& jpegMetadata,
8388 const camera3_capture_request_t *request)
8389{
8390 CameraMetadata frame_settings;
8391 frame_settings = request->settings;
8392
8393 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8394 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8395 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8396 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8397
8398 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8399 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8400 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8401 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8402
8403 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8404 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8405 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8406 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8407
8408 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8409 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8410 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8411 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8412
8413 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8414 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8415 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8416 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8417
8418 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8419 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8420 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8421 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8422
8423 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8424 int32_t thumbnail_size[2];
8425 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8426 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8427 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8428 int32_t orientation =
8429 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008430 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008431 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8432 int32_t temp;
8433 temp = thumbnail_size[0];
8434 thumbnail_size[0] = thumbnail_size[1];
8435 thumbnail_size[1] = temp;
8436 }
8437 }
8438 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8439 thumbnail_size,
8440 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8441 }
8442
8443}
8444
8445/*===========================================================================
8446 * FUNCTION : convertToRegions
8447 *
8448 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8449 *
8450 * PARAMETERS :
8451 * @rect : cam_rect_t struct to convert
8452 * @region : int32_t destination array
8453 * @weight : if we are converting from cam_area_t, weight is valid
8454 * else weight = -1
8455 *
8456 *==========================================================================*/
8457void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8458 int32_t *region, int weight)
8459{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008460 region[FACE_LEFT] = rect.left;
8461 region[FACE_TOP] = rect.top;
8462 region[FACE_RIGHT] = rect.left + rect.width;
8463 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008464 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008465 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008466 }
8467}
8468
8469/*===========================================================================
8470 * FUNCTION : convertFromRegions
8471 *
8472 * DESCRIPTION: helper method to convert a framework region tag into cam_area_t
8473 *
8474 * PARAMETERS :
8475 *   @roi            : cam_area_t destination struct
8476 *   @frame_settings : capture request settings containing the region tag
8477 *   @tag            : metadata tag holding the region as
8478 *                     [x_min, y_min, x_max, y_max, weight]
8479 *
8480 *==========================================================================*/
8481void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008482 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008483{
Thierry Strudel3d639192016-09-09 11:52:26 -07008484 int32_t x_min = frame_settings.find(tag).data.i32[0];
8485 int32_t y_min = frame_settings.find(tag).data.i32[1];
8486 int32_t x_max = frame_settings.find(tag).data.i32[2];
8487 int32_t y_max = frame_settings.find(tag).data.i32[3];
8488 roi.weight = frame_settings.find(tag).data.i32[4];
8489 roi.rect.left = x_min;
8490 roi.rect.top = y_min;
8491 roi.rect.width = x_max - x_min;
8492 roi.rect.height = y_max - y_min;
8493}
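
/*
 * Illustrative sketch (not part of the HAL): convertToRegions() and convertFromRegions()
 * are inverse mappings of the same 5-tuple encoding, assuming FACE_LEFT..FACE_WEIGHT
 * index positions 0..4. With assumed values:
 *
 *   cam_rect_t rect = { 100, 200, 300, 400 };   // left, top, width, height
 *   int32_t region[5];
 *   convertToRegions(rect, region, 1);
 *   // region == { 100, 200, 400, 600, 1 }      // x_min, y_min, x_max, y_max, weight
 *
 *   // convertFromRegions() reads the same 5-tuple back from a request tag
 *   // (e.g. ANDROID_CONTROL_AF_REGIONS) into a cam_area_t.
 */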
8494
8495/*===========================================================================
8496 * FUNCTION : resetIfNeededROI
8497 *
8498 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8499 * crop region
8500 *
8501 * PARAMETERS :
8502 * @roi : cam_area_t struct to resize
8503 * @scalerCropRegion : cam_crop_region_t region to compare against
8504 *
8505 *
8506 *==========================================================================*/
8507bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8508 const cam_crop_region_t* scalerCropRegion)
8509{
8510 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8511 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8512 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8513 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8514
8515    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8516     * Without this check, the calculations below that validate whether the roi is
8517     * inside the scaler crop region would fail, resulting in the roi not being
8518     * reset and the algorithm continuing to use a stale roi window.
8519 */
8520 if (roi->weight == 0) {
8521 return true;
8522 }
8523
8524 if ((roi_x_max < scalerCropRegion->left) ||
8525 // right edge of roi window is left of scalar crop's left edge
8526 (roi_y_max < scalerCropRegion->top) ||
8527 // bottom edge of roi window is above scalar crop's top edge
8528 (roi->rect.left > crop_x_max) ||
8529 // left edge of roi window is beyond(right) of scalar crop's right edge
8530 (roi->rect.top > crop_y_max)){
8531            // top edge of roi window is beyond (below) scalar crop's bottom edge
8532 return false;
8533 }
8534 if (roi->rect.left < scalerCropRegion->left) {
8535 roi->rect.left = scalerCropRegion->left;
8536 }
8537 if (roi->rect.top < scalerCropRegion->top) {
8538 roi->rect.top = scalerCropRegion->top;
8539 }
8540 if (roi_x_max > crop_x_max) {
8541 roi_x_max = crop_x_max;
8542 }
8543 if (roi_y_max > crop_y_max) {
8544 roi_y_max = crop_y_max;
8545 }
8546 roi->rect.width = roi_x_max - roi->rect.left;
8547 roi->rect.height = roi_y_max - roi->rect.top;
8548 return true;
8549}
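
/*
 * Illustrative sketch (not part of the HAL), with assumed values: for a scaler crop
 * region of {left=100, top=100, width=1000, height=800} (so x_max=1100, y_max=900)
 * and an roi of {left=50, top=50, width=200, height=200, weight=1}, the roi overlaps
 * the crop region, so it is clamped rather than rejected:
 *   result roi = {left=100, top=100, width=150, height=150}, return value true.
 * An roi entirely outside the crop region (e.g. left=1200) returns false, and an roi
 * with weight == 0 returns true without any clamping.
 */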
8550
8551/*===========================================================================
8552 * FUNCTION : convertLandmarks
8553 *
8554 * DESCRIPTION: helper method to extract the landmarks from face detection info
8555 *
8556 * PARAMETERS :
8557 * @landmark_data : input landmark data to be converted
8558 * @landmarks : int32_t destination array
8559 *
8560 *
8561 *==========================================================================*/
8562void QCamera3HardwareInterface::convertLandmarks(
8563 cam_face_landmarks_info_t landmark_data,
8564 int32_t *landmarks)
8565{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008566 if (landmark_data.is_left_eye_valid) {
8567 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8568 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8569 } else {
8570 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8571 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8572 }
8573
8574 if (landmark_data.is_right_eye_valid) {
8575 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8576 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8577 } else {
8578 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8579 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8580 }
8581
8582 if (landmark_data.is_mouth_valid) {
8583 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8584 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8585 } else {
8586 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8587 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8588 }
8589}
8590
8591/*===========================================================================
8592 * FUNCTION : setInvalidLandmarks
8593 *
8594 * DESCRIPTION: helper method to set invalid landmarks
8595 *
8596 * PARAMETERS :
8597 * @landmarks : int32_t destination array
8598 *
8599 *
8600 *==========================================================================*/
8601void QCamera3HardwareInterface::setInvalidLandmarks(
8602 int32_t *landmarks)
8603{
8604 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8605 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8606 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8607 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8608 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8609 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008610}
8611
8612#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008613
8614/*===========================================================================
8615 * FUNCTION : getCapabilities
8616 *
8617 * DESCRIPTION: query camera capability from back-end
8618 *
8619 * PARAMETERS :
8620 * @ops : mm-interface ops structure
8621 * @cam_handle : camera handle for which we need capability
8622 *
8623 * RETURN : ptr type of capability structure
8624 * capability for success
8625 * NULL for failure
8626 *==========================================================================*/
8627cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8628 uint32_t cam_handle)
8629{
8630 int rc = NO_ERROR;
8631 QCamera3HeapMemory *capabilityHeap = NULL;
8632 cam_capability_t *cap_ptr = NULL;
8633
8634 if (ops == NULL) {
8635 LOGE("Invalid arguments");
8636 return NULL;
8637 }
8638
8639 capabilityHeap = new QCamera3HeapMemory(1);
8640 if (capabilityHeap == NULL) {
8641 LOGE("creation of capabilityHeap failed");
8642 return NULL;
8643 }
8644
8645 /* Allocate memory for capability buffer */
8646 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8647 if(rc != OK) {
8648        LOGE("No memory for capability");
8649 goto allocate_failed;
8650 }
8651
8652 /* Map memory for capability buffer */
8653 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8654
8655 rc = ops->map_buf(cam_handle,
8656 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8657 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8658 if(rc < 0) {
8659 LOGE("failed to map capability buffer");
8660 rc = FAILED_TRANSACTION;
8661 goto map_failed;
8662 }
8663
8664 /* Query Capability */
8665 rc = ops->query_capability(cam_handle);
8666 if(rc < 0) {
8667 LOGE("failed to query capability");
8668 rc = FAILED_TRANSACTION;
8669 goto query_failed;
8670 }
8671
8672 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8673 if (cap_ptr == NULL) {
8674 LOGE("out of memory");
8675 rc = NO_MEMORY;
8676 goto query_failed;
8677 }
8678
8679 memset(cap_ptr, 0, sizeof(cam_capability_t));
8680 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8681
8682 int index;
8683 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8684 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8685 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8686 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8687 }
8688
8689query_failed:
8690 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8691map_failed:
8692 capabilityHeap->deallocate();
8693allocate_failed:
8694 delete capabilityHeap;
8695
8696 if (rc != NO_ERROR) {
8697 return NULL;
8698 } else {
8699 return cap_ptr;
8700 }
8701}
8702
Thierry Strudel3d639192016-09-09 11:52:26 -07008703/*===========================================================================
8704 * FUNCTION : initCapabilities
8705 *
8706 * DESCRIPTION: initialize camera capabilities in static data struct
8707 *
8708 * PARAMETERS :
8709 * @cameraId : camera Id
8710 *
8711 * RETURN : int32_t type of status
8712 * NO_ERROR -- success
8713 * none-zero failure code
8714 *==========================================================================*/
8715int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8716{
8717 int rc = 0;
8718 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008719 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008720
8721 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8722 if (rc) {
8723 LOGE("camera_open failed. rc = %d", rc);
8724 goto open_failed;
8725 }
8726 if (!cameraHandle) {
8727 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8728 goto open_failed;
8729 }
8730
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008731 handle = get_main_camera_handle(cameraHandle->camera_handle);
8732 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8733 if (gCamCapability[cameraId] == NULL) {
8734 rc = FAILED_TRANSACTION;
8735 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008736 }
8737
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008738 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008739 if (is_dual_camera_by_idx(cameraId)) {
8740 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8741 gCamCapability[cameraId]->aux_cam_cap =
8742 getCapabilities(cameraHandle->ops, handle);
8743 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8744 rc = FAILED_TRANSACTION;
8745 free(gCamCapability[cameraId]);
8746 goto failed_op;
8747 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008748
8749 // Copy the main camera capability to main_cam_cap struct
8750 gCamCapability[cameraId]->main_cam_cap =
8751 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8752 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8753 LOGE("out of memory");
8754 rc = NO_MEMORY;
8755 goto failed_op;
8756 }
8757 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8758 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008759 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008760failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008761 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8762 cameraHandle = NULL;
8763open_failed:
8764 return rc;
8765}
8766
8767/*==========================================================================
8768 * FUNCTION : get3Aversion
8769 *
8770 * DESCRIPTION: get the Q3A S/W version
8771 *
8772 * PARAMETERS :
8773 * @sw_version: Reference of Q3A structure which will hold version info upon
8774 * return
8775 *
8776 * RETURN : None
8777 *
8778 *==========================================================================*/
8779void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8780{
8781 if(gCamCapability[mCameraId])
8782 sw_version = gCamCapability[mCameraId]->q3a_version;
8783 else
8784 LOGE("Capability structure NULL!");
8785}
8786
8787
8788/*===========================================================================
8789 * FUNCTION : initParameters
8790 *
8791 * DESCRIPTION: initialize camera parameters
8792 *
8793 * PARAMETERS :
8794 *
8795 * RETURN : int32_t type of status
8796 * NO_ERROR -- success
8797 * none-zero failure code
8798 *==========================================================================*/
8799int QCamera3HardwareInterface::initParameters()
8800{
8801 int rc = 0;
8802
8803 //Allocate Set Param Buffer
8804 mParamHeap = new QCamera3HeapMemory(1);
8805 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8806 if(rc != OK) {
8807 rc = NO_MEMORY;
8808 LOGE("Failed to allocate SETPARM Heap memory");
8809 delete mParamHeap;
8810 mParamHeap = NULL;
8811 return rc;
8812 }
8813
8814 //Map memory for parameters buffer
8815 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8816 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8817 mParamHeap->getFd(0),
8818 sizeof(metadata_buffer_t),
8819 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8820 if(rc < 0) {
8821 LOGE("failed to map SETPARM buffer");
8822 rc = FAILED_TRANSACTION;
8823 mParamHeap->deallocate();
8824 delete mParamHeap;
8825 mParamHeap = NULL;
8826 return rc;
8827 }
8828
8829 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8830
8831 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8832 return rc;
8833}
8834
8835/*===========================================================================
8836 * FUNCTION : deinitParameters
8837 *
8838 * DESCRIPTION: de-initialize camera parameters
8839 *
8840 * PARAMETERS :
8841 *
8842 * RETURN : NONE
8843 *==========================================================================*/
8844void QCamera3HardwareInterface::deinitParameters()
8845{
8846 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8847 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8848
8849 mParamHeap->deallocate();
8850 delete mParamHeap;
8851 mParamHeap = NULL;
8852
8853 mParameters = NULL;
8854
8855 free(mPrevParameters);
8856 mPrevParameters = NULL;
8857}
8858
8859/*===========================================================================
8860 * FUNCTION : calcMaxJpegSize
8861 *
8862 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8863 *
8864 * PARAMETERS :
8865 *   @camera_id : camera Id
8866 * RETURN : max_jpeg_size
8867 *==========================================================================*/
8868size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8869{
8870 size_t max_jpeg_size = 0;
8871 size_t temp_width, temp_height;
8872 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8873 MAX_SIZES_CNT);
8874 for (size_t i = 0; i < count; i++) {
8875 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8876 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8877 if (temp_width * temp_height > max_jpeg_size ) {
8878 max_jpeg_size = temp_width * temp_height;
8879 }
8880 }
8881 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8882 return max_jpeg_size;
8883}
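
/*
 * Illustrative sketch (not part of the HAL), for an assumed maximum picture size of
 * 4000x3000:
 *   max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *                 = 18000000 + sizeof(camera3_jpeg_blob_t) bytes,
 * i.e. the size of a YUV 4:2:0 frame at that resolution plus the JPEG blob trailer.
 */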
8884
8885/*===========================================================================
8886 * FUNCTION : getMaxRawSize
8887 *
8888 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8889 *
8890 * PARAMETERS :
8891 *   @camera_id : camera Id
8892 * RETURN : Largest supported Raw Dimension
8893 *==========================================================================*/
8894cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8895{
8896 int max_width = 0;
8897 cam_dimension_t maxRawSize;
8898
8899 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8900 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8901 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8902 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8903 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8904 }
8905 }
8906 return maxRawSize;
8907}
8908
8909
8910/*===========================================================================
8911 * FUNCTION : calcMaxJpegDim
8912 *
8913 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8914 *
8915 * PARAMETERS :
8916 *
8917 * RETURN : max_jpeg_dim
8918 *==========================================================================*/
8919cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8920{
8921 cam_dimension_t max_jpeg_dim;
8922 cam_dimension_t curr_jpeg_dim;
8923 max_jpeg_dim.width = 0;
8924 max_jpeg_dim.height = 0;
8925 curr_jpeg_dim.width = 0;
8926 curr_jpeg_dim.height = 0;
8927 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8928 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8929 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8930 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8931 max_jpeg_dim.width * max_jpeg_dim.height ) {
8932 max_jpeg_dim.width = curr_jpeg_dim.width;
8933 max_jpeg_dim.height = curr_jpeg_dim.height;
8934 }
8935 }
8936 return max_jpeg_dim;
8937}
8938
8939/*===========================================================================
8940 * FUNCTION : addStreamConfig
8941 *
8942 * DESCRIPTION: adds the stream configuration to the array
8943 *
8944 * PARAMETERS :
8945 * @available_stream_configs : pointer to stream configuration array
8946 * @scalar_format : scalar format
8947 * @dim : configuration dimension
8948 * @config_type : input or output configuration type
8949 *
8950 * RETURN : NONE
8951 *==========================================================================*/
8952void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8953 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8954{
8955 available_stream_configs.add(scalar_format);
8956 available_stream_configs.add(dim.width);
8957 available_stream_configs.add(dim.height);
8958 available_stream_configs.add(config_type);
8959}
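
/*
 * Illustrative sketch (not part of the HAL): each call appends one 4-tuple
 * (format, width, height, direction) to the flat array later published through
 * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. With assumed values:
 *
 *   Vector<int32_t> configs;
 *   cam_dimension_t dim;
 *   dim.width = 1920;
 *   dim.height = 1080;
 *   addStreamConfig(configs, ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888, dim,
 *           ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *   // configs now ends with: { YCbCr_420_888, 1920, 1080, OUTPUT }
 */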
8960
8961/*===========================================================================
8962 * FUNCTION : suppportBurstCapture
8963 *
8964 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8965 *
8966 * PARAMETERS :
8967 * @cameraId : camera Id
8968 *
8969 * RETURN : true if camera supports BURST_CAPTURE
8970 * false otherwise
8971 *==========================================================================*/
8972bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8973{
8974 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8975 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8976 const int32_t highResWidth = 3264;
8977 const int32_t highResHeight = 2448;
8978
8979 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8980 // Maximum resolution images cannot be captured at >= 10fps
8981 // -> not supporting BURST_CAPTURE
8982 return false;
8983 }
8984
8985 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8986 // Maximum resolution images can be captured at >= 20fps
8987 // --> supporting BURST_CAPTURE
8988 return true;
8989 }
8990
8991 // Find the smallest highRes resolution, or largest resolution if there is none
8992 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8993 MAX_SIZES_CNT);
8994 size_t highRes = 0;
8995 while ((highRes + 1 < totalCnt) &&
8996 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8997 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8998 highResWidth * highResHeight)) {
8999 highRes++;
9000 }
9001 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9002 return true;
9003 } else {
9004 return false;
9005 }
9006}
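
/*
 * Illustrative sketch (not part of the HAL), with assumed capability values: if the
 * full-resolution minimum frame duration is 80 ms (between the 50 ms and 100 ms
 * bounds), the decision falls back to the smallest picture size that is still at
 * least 3264x2448; BURST_CAPTURE is advertised only if that size can be captured at
 * 50 ms per frame (20 fps) or faster.
 */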
9007
9008/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009009 * FUNCTION : getPDStatIndex
9010 *
9011 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9012 *
9013 * PARAMETERS :
9014 * @caps : camera capabilities
9015 *
9016 * RETURN : int32_t type
9017 * non-negative - on success
9018 * -1 - on failure
9019 *==========================================================================*/
9020int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9021 if (nullptr == caps) {
9022 return -1;
9023 }
9024
9025 uint32_t metaRawCount = caps->meta_raw_channel_count;
9026 int32_t ret = -1;
9027 for (size_t i = 0; i < metaRawCount; i++) {
9028 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9029 ret = i;
9030 break;
9031 }
9032 }
9033
9034 return ret;
9035}
9036
9037/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009038 * FUNCTION : initStaticMetadata
9039 *
9040 * DESCRIPTION: initialize the static metadata
9041 *
9042 * PARAMETERS :
9043 * @cameraId : camera Id
9044 *
9045 * RETURN : int32_t type of status
9046 * 0 -- success
9047 * non-zero failure code
9048 *==========================================================================*/
9049int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9050{
9051 int rc = 0;
9052 CameraMetadata staticInfo;
9053 size_t count = 0;
9054 bool limitedDevice = false;
9055 char prop[PROPERTY_VALUE_MAX];
9056 bool supportBurst = false;
9057
9058 supportBurst = supportBurstCapture(cameraId);
9059
9060 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9061    /* If the sensor is a YUV sensor (no raw support), or if per-frame control is not
9062     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9063     * advertised as a limited device. */
9064 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9065 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9066 !supportBurst;
9067
9068 uint8_t supportedHwLvl = limitedDevice ?
9069 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009070#ifndef USE_HAL_3_3
9071 // LEVEL_3 - This device will support level 3.
9072 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9073#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009074 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009075#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009076
9077 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9078 &supportedHwLvl, 1);
9079
9080 bool facingBack = false;
9081 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9082 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9083 facingBack = true;
9084 }
9085 /*HAL 3 only*/
9086 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9087 &gCamCapability[cameraId]->min_focus_distance, 1);
9088
9089 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9090 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9091
9092 /*should be using focal lengths but sensor doesn't provide that info now*/
9093 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9094 &gCamCapability[cameraId]->focal_length,
9095 1);
9096
9097 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9098 gCamCapability[cameraId]->apertures,
9099 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9100
9101 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9102 gCamCapability[cameraId]->filter_densities,
9103 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9104
9105
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009106 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9107 size_t mode_count =
9108 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9109 for (size_t i = 0; i < mode_count; i++) {
9110 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9111 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009112 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009113 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009114
9115 int32_t lens_shading_map_size[] = {
9116 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9117 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9118 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9119 lens_shading_map_size,
9120 sizeof(lens_shading_map_size)/sizeof(int32_t));
9121
9122 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9123 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9124
9125 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9126 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9127
9128 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9129 &gCamCapability[cameraId]->max_frame_duration, 1);
9130
9131 camera_metadata_rational baseGainFactor = {
9132 gCamCapability[cameraId]->base_gain_factor.numerator,
9133 gCamCapability[cameraId]->base_gain_factor.denominator};
9134 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9135 &baseGainFactor, 1);
9136
9137 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9138 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9139
9140 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9141 gCamCapability[cameraId]->pixel_array_size.height};
9142 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9143 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9144
9145 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9146 gCamCapability[cameraId]->active_array_size.top,
9147 gCamCapability[cameraId]->active_array_size.width,
9148 gCamCapability[cameraId]->active_array_size.height};
9149 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9150 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9151
9152 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9153 &gCamCapability[cameraId]->white_level, 1);
9154
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009155 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9156 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9157 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009158 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009159 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009160
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009161#ifndef USE_HAL_3_3
9162 bool hasBlackRegions = false;
9163 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9164 LOGW("black_region_count: %d is bounded to %d",
9165 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9166 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9167 }
9168 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9169 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9170 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9171 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9172 }
9173 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9174 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9175 hasBlackRegions = true;
9176 }
9177#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009178 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9179 &gCamCapability[cameraId]->flash_charge_duration, 1);
9180
9181 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9182 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9183
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009184 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9185 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9186 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009187 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9188 &timestampSource, 1);
9189
Thierry Strudel54dc9782017-02-15 12:12:10 -08009190 //update histogram vendor data
9191 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009192 &gCamCapability[cameraId]->histogram_size, 1);
9193
Thierry Strudel54dc9782017-02-15 12:12:10 -08009194 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009195 &gCamCapability[cameraId]->max_histogram_count, 1);
9196
Shuzhen Wang14415f52016-11-16 18:26:18 -08009197 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9198 //so that app can request fewer number of bins than the maximum supported.
9199 std::vector<int32_t> histBins;
9200 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9201 histBins.push_back(maxHistBins);
9202 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9203 (maxHistBins & 0x1) == 0) {
9204 histBins.push_back(maxHistBins >> 1);
9205 maxHistBins >>= 1;
9206 }
9207 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9208 histBins.data(), histBins.size());
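    // Illustrative example (assumed values): with max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the advertised bin list is {256, 128, 64, 32}.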
9209
Thierry Strudel3d639192016-09-09 11:52:26 -07009210 int32_t sharpness_map_size[] = {
9211 gCamCapability[cameraId]->sharpness_map_size.width,
9212 gCamCapability[cameraId]->sharpness_map_size.height};
9213
9214 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9215 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9216
9217 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9218 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9219
Emilian Peev0f3c3162017-03-15 12:57:46 +00009220 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9221 if (0 <= indexPD) {
9222 // Advertise PD stats data as part of the Depth capabilities
9223 int32_t depthWidth =
9224 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9225 int32_t depthHeight =
9226 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9227 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9228 assert(0 < depthSamplesCount);
9229 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9230 &depthSamplesCount, 1);
9231
9232 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9233 depthHeight,
9234 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9235 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9236 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9237 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9238 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9239
9240 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9241 depthHeight, 33333333,
9242 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9243 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9244 depthMinDuration,
9245 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9246
9247 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9248 depthHeight, 0,
9249 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9250 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9251 depthStallDuration,
9252 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9253
9254 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9255 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9256 }
9257
Thierry Strudel3d639192016-09-09 11:52:26 -07009258 int32_t scalar_formats[] = {
9259 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9260 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9261 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9262 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9263 HAL_PIXEL_FORMAT_RAW10,
9264 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009265 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9266 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9267 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009268
9269 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9270 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9271 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9272 count, MAX_SIZES_CNT, available_processed_sizes);
9273 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9274 available_processed_sizes, count * 2);
9275
9276 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9277 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9278 makeTable(gCamCapability[cameraId]->raw_dim,
9279 count, MAX_SIZES_CNT, available_raw_sizes);
9280 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9281 available_raw_sizes, count * 2);
9282
9283 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9284 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9285 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9286 count, MAX_SIZES_CNT, available_fps_ranges);
9287 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9288 available_fps_ranges, count * 2);
9289
9290 camera_metadata_rational exposureCompensationStep = {
9291 gCamCapability[cameraId]->exp_compensation_step.numerator,
9292 gCamCapability[cameraId]->exp_compensation_step.denominator};
9293 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9294 &exposureCompensationStep, 1);
9295
9296 Vector<uint8_t> availableVstabModes;
9297 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9298 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009299 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009300 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009301 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009302 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009303 count = IS_TYPE_MAX;
9304 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9305 for (size_t i = 0; i < count; i++) {
9306 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9307 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9308 eisSupported = true;
9309 break;
9310 }
9311 }
9312 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009313 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9314 }
9315 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9316 availableVstabModes.array(), availableVstabModes.size());
9317
9318 /*HAL 1 and HAL 3 common*/
9319 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9320 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9321 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009322 // Cap the max zoom to the max preferred value
9323 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
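    // Note: the division above is integral (both operands are uint32_t), so e.g. a
    // hypothetical zoom table ending at 779 with minZoomStep 100 yields 7 before the cap.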
Thierry Strudel3d639192016-09-09 11:52:26 -07009324 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9325 &maxZoom, 1);
9326
9327 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9328 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9329
9330 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9331 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9332 max3aRegions[2] = 0; /* AF not supported */
9333 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9334 max3aRegions, 3);
9335
9336 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9337 memset(prop, 0, sizeof(prop));
9338 property_get("persist.camera.facedetect", prop, "1");
9339 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9340 LOGD("Support face detection mode: %d",
9341 supportedFaceDetectMode);
9342
9343 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009344    /* supported mode should be OFF if the max number of faces is 0 */
9345 if (maxFaces <= 0) {
9346 supportedFaceDetectMode = 0;
9347 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009348 Vector<uint8_t> availableFaceDetectModes;
9349 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9350 if (supportedFaceDetectMode == 1) {
9351 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9352 } else if (supportedFaceDetectMode == 2) {
9353 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9354 } else if (supportedFaceDetectMode == 3) {
9355 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9356 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9357 } else {
9358 maxFaces = 0;
9359 }
9360 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9361 availableFaceDetectModes.array(),
9362 availableFaceDetectModes.size());
9363 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9364 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009365 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9366 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9367 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009368
9369 int32_t exposureCompensationRange[] = {
9370 gCamCapability[cameraId]->exposure_compensation_min,
9371 gCamCapability[cameraId]->exposure_compensation_max};
9372 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9373 exposureCompensationRange,
9374 sizeof(exposureCompensationRange)/sizeof(int32_t));
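    // Illustrative example (values hypothetical): a range of [-18, 18] with a step of
    // 1/6 EV allows compensation from -3 EV to +3 EV in 1/6 EV increments.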
9375
9376 uint8_t lensFacing = (facingBack) ?
9377 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9378 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9379
9380 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9381 available_thumbnail_sizes,
9382 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9383
9384    /* all sizes will be combined into this tag */
9385 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9386 /*android.scaler.availableStreamConfigurations*/
9387 Vector<int32_t> available_stream_configs;
9388 cam_dimension_t active_array_dim;
9389 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9390 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009391
9392    /* Advertise the list of supported input dimensions based on the property below.
9393       By default all sizes up to 5MP will be advertised.
9394       Note that the setprop resolution format should be WxH.
9395       e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9396       To list all supported sizes, the setprop needs to be set to "0x0" */
9397 cam_dimension_t minInputSize = {2592,1944}; //5MP
9398 memset(prop, 0, sizeof(prop));
9399 property_get("persist.camera.input.minsize", prop, "2592x1944");
9400 if (strlen(prop) > 0) {
9401 char *saveptr = NULL;
9402 char *token = strtok_r(prop, "x", &saveptr);
9403 if (token != NULL) {
9404 minInputSize.width = atoi(token);
9405 }
9406 token = strtok_r(NULL, "x", &saveptr);
9407 if (token != NULL) {
9408 minInputSize.height = atoi(token);
9409 }
9410 }
9411
Thierry Strudel3d639192016-09-09 11:52:26 -07009412    /* Add input/output stream configurations for each scalar format */
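    /* Each addStreamConfig() call below is expected to append one
     * (format, width, height, direction) tuple, matching the layout of
     * ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS. */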
9413 for (size_t j = 0; j < scalar_formats_count; j++) {
9414 switch (scalar_formats[j]) {
9415 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9416 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9417 case HAL_PIXEL_FORMAT_RAW10:
9418 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9419 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9420 addStreamConfig(available_stream_configs, scalar_formats[j],
9421 gCamCapability[cameraId]->raw_dim[i],
9422 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9423 }
9424 break;
9425 case HAL_PIXEL_FORMAT_BLOB:
9426 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9427 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9428 addStreamConfig(available_stream_configs, scalar_formats[j],
9429 gCamCapability[cameraId]->picture_sizes_tbl[i],
9430 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9431 }
9432 break;
9433 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9435 default:
9436 cam_dimension_t largest_picture_size;
9437 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9438 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9439 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9440 addStreamConfig(available_stream_configs, scalar_formats[j],
9441 gCamCapability[cameraId]->picture_sizes_tbl[i],
9442 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009443            /* For the two formats below we also support input streams for reprocessing; advertise those */
9444 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9445 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9446 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9447 >= minInputSize.width) || (gCamCapability[cameraId]->
9448 picture_sizes_tbl[i].height >= minInputSize.height)) {
9449 addStreamConfig(available_stream_configs, scalar_formats[j],
9450 gCamCapability[cameraId]->picture_sizes_tbl[i],
9451 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9452 }
9453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009454 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009455
Thierry Strudel3d639192016-09-09 11:52:26 -07009456 break;
9457 }
9458 }
9459
9460 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9461 available_stream_configs.array(), available_stream_configs.size());
9462 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9463 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9464
9465 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9466 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9467
9468 /* android.scaler.availableMinFrameDurations */
9469 Vector<int64_t> available_min_durations;
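    // Entries appended below are (format, width, height, min frame duration in ns)
    // quadruples, one per supported dimension of each format.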
9470 for (size_t j = 0; j < scalar_formats_count; j++) {
9471 switch (scalar_formats[j]) {
9472 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9473 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9474 case HAL_PIXEL_FORMAT_RAW10:
9475 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9476 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9477 available_min_durations.add(scalar_formats[j]);
9478 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9479 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9480 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9481 }
9482 break;
9483 default:
9484 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9485 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9486 available_min_durations.add(scalar_formats[j]);
9487 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9488 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9489 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9490 }
9491 break;
9492 }
9493 }
9494 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9495 available_min_durations.array(), available_min_durations.size());
9496
9497 Vector<int32_t> available_hfr_configs;
9498 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9499 int32_t fps = 0;
9500 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9501 case CAM_HFR_MODE_60FPS:
9502 fps = 60;
9503 break;
9504 case CAM_HFR_MODE_90FPS:
9505 fps = 90;
9506 break;
9507 case CAM_HFR_MODE_120FPS:
9508 fps = 120;
9509 break;
9510 case CAM_HFR_MODE_150FPS:
9511 fps = 150;
9512 break;
9513 case CAM_HFR_MODE_180FPS:
9514 fps = 180;
9515 break;
9516 case CAM_HFR_MODE_210FPS:
9517 fps = 210;
9518 break;
9519 case CAM_HFR_MODE_240FPS:
9520 fps = 240;
9521 break;
9522 case CAM_HFR_MODE_480FPS:
9523 fps = 480;
9524 break;
9525 case CAM_HFR_MODE_OFF:
9526 case CAM_HFR_MODE_MAX:
9527 default:
9528 break;
9529 }
9530
9531 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9532 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9533 /* For each HFR frame rate, need to advertise one variable fps range
9534 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9535 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9536 * set by the app. When video recording is started, [120, 120] is
9537 * set. This way sensor configuration does not change when recording
9538 * is started */
9539
9540 /* (width, height, fps_min, fps_max, batch_size_max) */
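            /* Illustrative example (dimensions hypothetical): for a 1920x1080 entry at
             * 120 FPS, assuming PREVIEW_FPS_FOR_HFR is 30, this adds (1920, 1080, 30, 120, 4)
             * followed by (1920, 1080, 120, 120, 4). */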
9541 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9542 j < MAX_SIZES_CNT; j++) {
9543 available_hfr_configs.add(
9544 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9545 available_hfr_configs.add(
9546 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9547 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9548 available_hfr_configs.add(fps);
9549 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9550
9551 /* (width, height, fps_min, fps_max, batch_size_max) */
9552 available_hfr_configs.add(
9553 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9554 available_hfr_configs.add(
9555 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9556 available_hfr_configs.add(fps);
9557 available_hfr_configs.add(fps);
9558 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9559 }
9560 }
9561 }
9562 //Advertise HFR capability only if the property is set
9563 memset(prop, 0, sizeof(prop));
9564 property_get("persist.camera.hal3hfr.enable", prop, "1");
9565 uint8_t hfrEnable = (uint8_t)atoi(prop);
9566
9567 if(hfrEnable && available_hfr_configs.array()) {
9568 staticInfo.update(
9569 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9570 available_hfr_configs.array(), available_hfr_configs.size());
9571 }
9572
9573 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9574 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9575 &max_jpeg_size, 1);
9576
9577 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9578 size_t size = 0;
9579 count = CAM_EFFECT_MODE_MAX;
9580 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9581 for (size_t i = 0; i < count; i++) {
9582 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9583 gCamCapability[cameraId]->supported_effects[i]);
9584 if (NAME_NOT_FOUND != val) {
9585 avail_effects[size] = (uint8_t)val;
9586 size++;
9587 }
9588 }
9589 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9590 avail_effects,
9591 size);
9592
9593 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9594 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9595 size_t supported_scene_modes_cnt = 0;
9596 count = CAM_SCENE_MODE_MAX;
9597 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9598 for (size_t i = 0; i < count; i++) {
9599 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9600 CAM_SCENE_MODE_OFF) {
9601 int val = lookupFwkName(SCENE_MODES_MAP,
9602 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9603 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009604
Thierry Strudel3d639192016-09-09 11:52:26 -07009605 if (NAME_NOT_FOUND != val) {
9606 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9607 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9608 supported_scene_modes_cnt++;
9609 }
9610 }
9611 }
9612 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9613 avail_scene_modes,
9614 supported_scene_modes_cnt);
9615
9616 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9617 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9618 supported_scene_modes_cnt,
9619 CAM_SCENE_MODE_MAX,
9620 scene_mode_overrides,
9621 supported_indexes,
9622 cameraId);
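    // Each supported scene mode contributes an (AE mode, AWB mode, AF mode) override
    // triple, hence the factor of 3 in the buffer size above and the tag count below.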
9623
9624 if (supported_scene_modes_cnt == 0) {
9625 supported_scene_modes_cnt = 1;
9626 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9627 }
9628
9629 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9630 scene_mode_overrides, supported_scene_modes_cnt * 3);
9631
9632 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9633 ANDROID_CONTROL_MODE_AUTO,
9634 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9635 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9636 available_control_modes,
9637 3);
9638
9639 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9640 size = 0;
9641 count = CAM_ANTIBANDING_MODE_MAX;
9642 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9643 for (size_t i = 0; i < count; i++) {
9644 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9645 gCamCapability[cameraId]->supported_antibandings[i]);
9646 if (NAME_NOT_FOUND != val) {
9647 avail_antibanding_modes[size] = (uint8_t)val;
9648 size++;
9649 }
9650
9651 }
9652 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9653 avail_antibanding_modes,
9654 size);
9655
9656 uint8_t avail_abberation_modes[] = {
9657 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9658 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9659 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9660 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9661 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9662 if (0 == count) {
9663        // If no aberration correction modes are available for a device, advertise only the OFF mode
9664 size = 1;
9665 } else {
9666        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9667        // So, advertise all 3 modes if at least one mode is supported, as per the
9668        // new M requirement.
9669 size = 3;
9670 }
9671 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9672 avail_abberation_modes,
9673 size);
9674
9675 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9676 size = 0;
9677 count = CAM_FOCUS_MODE_MAX;
9678 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9679 for (size_t i = 0; i < count; i++) {
9680 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9681 gCamCapability[cameraId]->supported_focus_modes[i]);
9682 if (NAME_NOT_FOUND != val) {
9683 avail_af_modes[size] = (uint8_t)val;
9684 size++;
9685 }
9686 }
9687 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9688 avail_af_modes,
9689 size);
9690
9691 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9692 size = 0;
9693 count = CAM_WB_MODE_MAX;
9694 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9695 for (size_t i = 0; i < count; i++) {
9696 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9697 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9698 gCamCapability[cameraId]->supported_white_balances[i]);
9699 if (NAME_NOT_FOUND != val) {
9700 avail_awb_modes[size] = (uint8_t)val;
9701 size++;
9702 }
9703 }
9704 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9705 avail_awb_modes,
9706 size);
9707
9708 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9709 count = CAM_FLASH_FIRING_LEVEL_MAX;
9710 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9711 count);
9712 for (size_t i = 0; i < count; i++) {
9713 available_flash_levels[i] =
9714 gCamCapability[cameraId]->supported_firing_levels[i];
9715 }
9716 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9717 available_flash_levels, count);
9718
9719 uint8_t flashAvailable;
9720 if (gCamCapability[cameraId]->flash_available)
9721 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9722 else
9723 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9724 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9725 &flashAvailable, 1);
9726
9727 Vector<uint8_t> avail_ae_modes;
9728 count = CAM_AE_MODE_MAX;
9729 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9730 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009731 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9732 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9733 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9734 }
9735 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009736 }
9737 if (flashAvailable) {
9738 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9739 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9740 }
9741 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9742 avail_ae_modes.array(),
9743 avail_ae_modes.size());
9744
9745 int32_t sensitivity_range[2];
9746 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9747 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9748 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9749 sensitivity_range,
9750 sizeof(sensitivity_range) / sizeof(int32_t));
9751
9752 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9753 &gCamCapability[cameraId]->max_analog_sensitivity,
9754 1);
9755
9756 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9757 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9758 &sensor_orientation,
9759 1);
9760
9761 int32_t max_output_streams[] = {
9762 MAX_STALLING_STREAMS,
9763 MAX_PROCESSED_STREAMS,
9764 MAX_RAW_STREAMS};
9765 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9766 max_output_streams,
9767 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9768
9769 uint8_t avail_leds = 0;
9770 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9771 &avail_leds, 0);
9772
9773 uint8_t focus_dist_calibrated;
9774 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9775 gCamCapability[cameraId]->focus_dist_calibrated);
9776 if (NAME_NOT_FOUND != val) {
9777 focus_dist_calibrated = (uint8_t)val;
9778 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9779 &focus_dist_calibrated, 1);
9780 }
9781
9782 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9783 size = 0;
9784 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9785 MAX_TEST_PATTERN_CNT);
9786 for (size_t i = 0; i < count; i++) {
9787 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9788 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9789 if (NAME_NOT_FOUND != testpatternMode) {
9790 avail_testpattern_modes[size] = testpatternMode;
9791 size++;
9792 }
9793 }
9794 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9795 avail_testpattern_modes,
9796 size);
9797
9798 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9799 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9800 &max_pipeline_depth,
9801 1);
9802
9803 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9804 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9805 &partial_result_count,
9806 1);
9807
9808 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9809 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9810
9811 Vector<uint8_t> available_capabilities;
9812 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9813 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9814 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9815 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9816 if (supportBurst) {
9817 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9818 }
9819 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9820 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9821 if (hfrEnable && available_hfr_configs.array()) {
9822 available_capabilities.add(
9823 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9824 }
9825
9826 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9827 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9828 }
9829 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9830 available_capabilities.array(),
9831 available_capabilities.size());
9832
9833    //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9834    //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9835 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9836 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9837
9838 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9839 &aeLockAvailable, 1);
9840
9841    //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9842    //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9843 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9844 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9845
9846 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9847 &awbLockAvailable, 1);
9848
9849 int32_t max_input_streams = 1;
9850 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9851 &max_input_streams,
9852 1);
9853
9854 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9855 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9856 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9857 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9858 HAL_PIXEL_FORMAT_YCbCr_420_888};
9859 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9860 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9861
9862 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9863 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9864 &max_latency,
9865 1);
9866
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009867#ifndef USE_HAL_3_3
9868 int32_t isp_sensitivity_range[2];
9869 isp_sensitivity_range[0] =
9870 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9871 isp_sensitivity_range[1] =
9872 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9873 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9874 isp_sensitivity_range,
9875 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9876#endif
9877
Thierry Strudel3d639192016-09-09 11:52:26 -07009878 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9879 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9880 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9881 available_hot_pixel_modes,
9882 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9883
9884 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9885 ANDROID_SHADING_MODE_FAST,
9886 ANDROID_SHADING_MODE_HIGH_QUALITY};
9887 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9888 available_shading_modes,
9889 3);
9890
9891 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9892 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9893 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9894 available_lens_shading_map_modes,
9895 2);
9896
9897 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9898 ANDROID_EDGE_MODE_FAST,
9899 ANDROID_EDGE_MODE_HIGH_QUALITY,
9900 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9901 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9902 available_edge_modes,
9903 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9904
9905 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9906 ANDROID_NOISE_REDUCTION_MODE_FAST,
9907 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9908 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9909 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9910 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9911 available_noise_red_modes,
9912 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9913
9914 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9915 ANDROID_TONEMAP_MODE_FAST,
9916 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9917 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9918 available_tonemap_modes,
9919 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9920
9921 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9922 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9923 available_hot_pixel_map_modes,
9924 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9925
9926 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9927 gCamCapability[cameraId]->reference_illuminant1);
9928 if (NAME_NOT_FOUND != val) {
9929 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9930 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9931 }
9932
9933 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9934 gCamCapability[cameraId]->reference_illuminant2);
9935 if (NAME_NOT_FOUND != val) {
9936 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9937 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9938 }
9939
9940 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9941 (void *)gCamCapability[cameraId]->forward_matrix1,
9942 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9943
9944 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9945 (void *)gCamCapability[cameraId]->forward_matrix2,
9946 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9947
9948 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9949 (void *)gCamCapability[cameraId]->color_transform1,
9950 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9951
9952 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9953 (void *)gCamCapability[cameraId]->color_transform2,
9954 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9955
9956 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9957 (void *)gCamCapability[cameraId]->calibration_transform1,
9958 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9959
9960 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9961 (void *)gCamCapability[cameraId]->calibration_transform2,
9962 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9963
9964 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9965 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9966 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9967 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9968 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9969 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9970 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9971 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9972 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9973 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9974 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9975 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9976 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9977 ANDROID_JPEG_GPS_COORDINATES,
9978 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9979 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9980 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9981 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9982 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9983 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9984 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9985 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9986 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9987 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009988#ifndef USE_HAL_3_3
9989 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9990#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009991 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009992 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009993 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9994 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009995 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009996 /* DevCamDebug metadata request_keys_basic */
9997 DEVCAMDEBUG_META_ENABLE,
9998 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009999 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010000 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010001 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010002 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010003 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010004
10005 size_t request_keys_cnt =
10006 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10007 Vector<int32_t> available_request_keys;
10008 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10009 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10010 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10011 }
10012
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010013 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010014 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010015 }
10016
Thierry Strudel3d639192016-09-09 11:52:26 -070010017 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10018 available_request_keys.array(), available_request_keys.size());
10019
10020 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10021 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10022 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10023 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10024 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10025 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10026 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10027 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10028 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10029 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10030 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10031 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10032 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10033 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10034 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10035 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10036 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010037 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010038 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10039 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10040 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010041 ANDROID_STATISTICS_FACE_SCORES,
10042#ifndef USE_HAL_3_3
10043 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10044#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010045 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010046 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010047 // DevCamDebug metadata result_keys_basic
10048 DEVCAMDEBUG_META_ENABLE,
10049 // DevCamDebug metadata result_keys AF
10050 DEVCAMDEBUG_AF_LENS_POSITION,
10051 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10052 DEVCAMDEBUG_AF_TOF_DISTANCE,
10053 DEVCAMDEBUG_AF_LUMA,
10054 DEVCAMDEBUG_AF_HAF_STATE,
10055 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10056 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10057 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10058 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10059 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10060 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10061 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10062 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10063 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10064 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10065 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10066 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10067 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10068 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10069 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10070 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10071 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10072 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10073 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10074 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10075 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10076 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10077 // DevCamDebug metadata result_keys AEC
10078 DEVCAMDEBUG_AEC_TARGET_LUMA,
10079 DEVCAMDEBUG_AEC_COMP_LUMA,
10080 DEVCAMDEBUG_AEC_AVG_LUMA,
10081 DEVCAMDEBUG_AEC_CUR_LUMA,
10082 DEVCAMDEBUG_AEC_LINECOUNT,
10083 DEVCAMDEBUG_AEC_REAL_GAIN,
10084 DEVCAMDEBUG_AEC_EXP_INDEX,
10085 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010086 // DevCamDebug metadata result_keys zzHDR
10087 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10088 DEVCAMDEBUG_AEC_L_LINECOUNT,
10089 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10090 DEVCAMDEBUG_AEC_S_LINECOUNT,
10091 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10092 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10093 // DevCamDebug metadata result_keys ADRC
10094 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10095 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10096 DEVCAMDEBUG_AEC_GTM_RATIO,
10097 DEVCAMDEBUG_AEC_LTM_RATIO,
10098 DEVCAMDEBUG_AEC_LA_RATIO,
10099 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010100 // DevCamDebug metadata result_keys AWB
10101 DEVCAMDEBUG_AWB_R_GAIN,
10102 DEVCAMDEBUG_AWB_G_GAIN,
10103 DEVCAMDEBUG_AWB_B_GAIN,
10104 DEVCAMDEBUG_AWB_CCT,
10105 DEVCAMDEBUG_AWB_DECISION,
10106 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010107 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10108 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10109 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010110 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010111 };
10112
Thierry Strudel3d639192016-09-09 11:52:26 -070010113 size_t result_keys_cnt =
10114 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10115
10116 Vector<int32_t> available_result_keys;
10117 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10118 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10119 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10120 }
10121 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10122 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10123 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10124 }
10125 if (supportedFaceDetectMode == 1) {
10126 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10127 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10128 } else if ((supportedFaceDetectMode == 2) ||
10129 (supportedFaceDetectMode == 3)) {
10130 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10131 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10132 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010133#ifndef USE_HAL_3_3
10134 if (hasBlackRegions) {
10135 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10136 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10137 }
10138#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010139
10140 if (gExposeEnableZslKey) {
10141 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10142 }
10143
Thierry Strudel3d639192016-09-09 11:52:26 -070010144 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10145 available_result_keys.array(), available_result_keys.size());
10146
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010147 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010148 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10149 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10150 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10151 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10152 ANDROID_SCALER_CROPPING_TYPE,
10153 ANDROID_SYNC_MAX_LATENCY,
10154 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10155 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10156 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10157 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10158 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10159 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10160 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10161 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10162 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10163 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10164 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10165 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10166 ANDROID_LENS_FACING,
10167 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10168 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10169 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10170 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10171 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10172 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10173 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10174 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10175 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10176 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10177 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10178 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10179 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10180 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10181 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10182 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10183 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10184 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10185 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10186 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010187 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010188 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10189 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10190 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10191 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10192 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10193 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10194 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10195 ANDROID_CONTROL_AVAILABLE_MODES,
10196 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10197 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10198 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10199 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010200 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10201#ifndef USE_HAL_3_3
10202 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10203 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10204#endif
10205 };
10206
10207 Vector<int32_t> available_characteristics_keys;
10208 available_characteristics_keys.appendArray(characteristics_keys_basic,
10209 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10210#ifndef USE_HAL_3_3
10211 if (hasBlackRegions) {
10212 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10213 }
10214#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010215
10216 if (0 <= indexPD) {
10217 int32_t depthKeys[] = {
10218 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10219 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10220 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10221 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10222 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10223 };
10224 available_characteristics_keys.appendArray(depthKeys,
10225 sizeof(depthKeys) / sizeof(depthKeys[0]));
10226 }
10227
Thierry Strudel3d639192016-09-09 11:52:26 -070010228 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010229 available_characteristics_keys.array(),
10230 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010231
10232 /*available stall durations depend on the hw + sw and will be different for different devices */
10233 /*have to add for raw after implementation*/
10234 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10235 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10236
10237 Vector<int64_t> available_stall_durations;
10238 for (uint32_t j = 0; j < stall_formats_count; j++) {
10239 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10240 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10241 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10242 available_stall_durations.add(stall_formats[j]);
10243 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10244 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10245 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10246 }
10247 } else {
10248 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10249 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10250 available_stall_durations.add(stall_formats[j]);
10251 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10252 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10253 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10254 }
10255 }
10256 }
10257 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10258 available_stall_durations.array(),
10259 available_stall_durations.size());
10260
10261 //QCAMERA3_OPAQUE_RAW
10262 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10263 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10264 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10265 case LEGACY_RAW:
10266 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10267 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10268 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10269 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10270 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10271 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10272 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10273 break;
10274 case MIPI_RAW:
10275 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10276 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10277 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10278 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10279 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10280 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10281 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10282 break;
10283 default:
10284 LOGE("unknown opaque_raw_format %d",
10285 gCamCapability[cameraId]->opaque_raw_fmt);
10286 break;
10287 }
10288 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10289
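    // Each iteration below appends one (width, height, stride) triple for a supported
    // opaque RAW dimension; the stride comes from mm_stream_calc_offset_raw().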
10290 Vector<int32_t> strides;
10291 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10292 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10293 cam_stream_buf_plane_info_t buf_planes;
10294 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10295 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10296 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10297 &gCamCapability[cameraId]->padding_info, &buf_planes);
10298 strides.add(buf_planes.plane_info.mp[0].stride);
10299 }
10300 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10301 strides.size());
10302
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010303 //TBD: remove the following line once backend advertises zzHDR in feature mask
10304 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010305 //Video HDR default
10306 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10307 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010308 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010309 int32_t vhdr_mode[] = {
10310 QCAMERA3_VIDEO_HDR_MODE_OFF,
10311 QCAMERA3_VIDEO_HDR_MODE_ON};
10312
10313 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10314 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10315 vhdr_mode, vhdr_mode_count);
10316 }
10317
Thierry Strudel3d639192016-09-09 11:52:26 -070010318 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10319 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10320 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10321
10322 uint8_t isMonoOnly =
10323 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10324 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10325 &isMonoOnly, 1);
10326
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010327#ifndef USE_HAL_3_3
10328 Vector<int32_t> opaque_size;
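    // Entries appended below are (width, height, frame length in bytes) triples
    // (PER_CONFIGURATION_SIZE_3) describing opaque RAW buffer sizes.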
10329 for (size_t j = 0; j < scalar_formats_count; j++) {
10330 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10331 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10332 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10333 cam_stream_buf_plane_info_t buf_planes;
10334
10335 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10336 &gCamCapability[cameraId]->padding_info, &buf_planes);
10337
10338 if (rc == 0) {
10339 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10340 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10341 opaque_size.add(buf_planes.plane_info.frame_len);
10342                } else {
10343 LOGE("raw frame calculation failed!");
10344 }
10345 }
10346 }
10347 }
10348
10349 if ((opaque_size.size() > 0) &&
10350 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10351 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10352 else
10353 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10354#endif
10355
Thierry Strudel04e026f2016-10-10 11:27:36 -070010356 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10357 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10358 size = 0;
10359 count = CAM_IR_MODE_MAX;
10360 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10361 for (size_t i = 0; i < count; i++) {
10362 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10363 gCamCapability[cameraId]->supported_ir_modes[i]);
10364 if (NAME_NOT_FOUND != val) {
10365 avail_ir_modes[size] = (int32_t)val;
10366 size++;
10367 }
10368 }
10369 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10370 avail_ir_modes, size);
10371 }
10372
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010373 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10374 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10375 size = 0;
10376 count = CAM_AEC_CONVERGENCE_MAX;
10377 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10378 for (size_t i = 0; i < count; i++) {
10379 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10380 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10381 if (NAME_NOT_FOUND != val) {
10382 available_instant_aec_modes[size] = (int32_t)val;
10383 size++;
10384 }
10385 }
10386 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10387 available_instant_aec_modes, size);
10388 }
10389
Thierry Strudel54dc9782017-02-15 12:12:10 -080010390 int32_t sharpness_range[] = {
10391 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10392 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10393 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10394
10395 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10396 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10397 size = 0;
10398 count = CAM_BINNING_CORRECTION_MODE_MAX;
10399 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10400 for (size_t i = 0; i < count; i++) {
10401 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10402 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10403 gCamCapability[cameraId]->supported_binning_modes[i]);
10404 if (NAME_NOT_FOUND != val) {
10405 avail_binning_modes[size] = (int32_t)val;
10406 size++;
10407 }
10408 }
10409 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10410 avail_binning_modes, size);
10411 }
10412
10413 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10414 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10415 size = 0;
10416 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10417 for (size_t i = 0; i < count; i++) {
10418 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10419 gCamCapability[cameraId]->supported_aec_modes[i]);
10420 if (NAME_NOT_FOUND != val)
10421 available_aec_modes[size++] = val;
10422 }
10423 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10424 available_aec_modes, size);
10425 }
10426
10427 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10428 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10429 size = 0;
10430 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10431 for (size_t i = 0; i < count; i++) {
10432 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10433 gCamCapability[cameraId]->supported_iso_modes[i]);
10434 if (NAME_NOT_FOUND != val)
10435 available_iso_modes[size++] = val;
10436 }
10437 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10438 available_iso_modes, size);
10439 }
10440
10441 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010442 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010443 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10444 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10445 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10446
10447 int32_t available_saturation_range[4];
10448 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10449 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10450 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10451 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10452 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10453 available_saturation_range, 4);
10454
10455 uint8_t is_hdr_values[2];
10456 is_hdr_values[0] = 0;
10457 is_hdr_values[1] = 1;
10458 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10459 is_hdr_values, 2);
10460
10461 float is_hdr_confidence_range[2];
10462 is_hdr_confidence_range[0] = 0.0;
10463 is_hdr_confidence_range[1] = 1.0;
10464 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10465 is_hdr_confidence_range, 2);
10466
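    // Append an Easel presence marker (",E:Y" / ",E:N") to the EEPROM version
    // string so it is visible through the NEXUS_EXPERIMENTAL_2017 vendor tag.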
Emilian Peev0a972ef2017-03-16 10:25:53 +000010467 size_t eepromLength = strnlen(
10468 reinterpret_cast<const char *>(
10469 gCamCapability[cameraId]->eeprom_version_info),
10470 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10471 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010472 char easelInfo[] = ",E:N";
10473 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10474 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10475 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010476 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10477 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010478 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010479 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10480 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10481 }
10482
Thierry Strudel3d639192016-09-09 11:52:26 -070010483 gStaticMetadata[cameraId] = staticInfo.release();
10484 return rc;
10485}
10486
10487/*===========================================================================
10488 * FUNCTION : makeTable
10489 *
10490 * DESCRIPTION: make a table of sizes
10491 *
10492 * PARAMETERS :
10493 *   @dimTable / @size : source dimension table and its valid entry count
10494 *   @max_size / @sizeTable : output capacity and flattened (width, height) array
10495 *==========================================================================*/
10496void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10497 size_t max_size, int32_t *sizeTable)
10498{
10499 size_t j = 0;
10500 if (size > max_size) {
10501 size = max_size;
10502 }
10503 for (size_t i = 0; i < size; i++) {
10504 sizeTable[j] = dimTable[i].width;
10505 sizeTable[j+1] = dimTable[i].height;
10506 j+=2;
10507 }
10508}
10509
10510/*===========================================================================
10511 * FUNCTION : makeFPSTable
10512 *
10513 * DESCRIPTION: make a table of fps ranges
10514 *
10515 * PARAMETERS :
10516 *   @fpsTable / @size : source fps range table and its valid entry count; flattened (min, max) pairs are written to @fpsRangesTable (capacity @max_size)
10517 *==========================================================================*/
10518void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10519 size_t max_size, int32_t *fpsRangesTable)
10520{
10521 size_t j = 0;
10522 if (size > max_size) {
10523 size = max_size;
10524 }
10525 for (size_t i = 0; i < size; i++) {
10526 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10527 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10528 j+=2;
10529 }
10530}
10531
10532/*===========================================================================
10533 * FUNCTION : makeOverridesList
10534 *
10535 * DESCRIPTION: make a list of scene mode overrides
10536 *
10537 * PARAMETERS :
10538 *   @overridesTable / @size : per-scene-mode overrides reported by the backend
10539 *   @overridesList / @supported_indexes / @camera_id : output triplet list, framework scene-mode index map, and camera id
10540 *==========================================================================*/
10541void QCamera3HardwareInterface::makeOverridesList(
10542 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10543 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10544{
10545 /* The daemon gives a list of overrides for all scene modes.
10546 However, we should send the framework only the overrides for the
10547 scene modes it supports. */
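    // Each entry in overridesList is an (AE mode, AWB mode, AF mode) triplet,
    // matching the ANDROID_CONTROL_SCENE_MODE_OVERRIDES layout, hence the
    // stride of 3 below.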
10548 size_t j = 0;
10549 if (size > max_size) {
10550 size = max_size;
10551 }
10552 size_t focus_count = CAM_FOCUS_MODE_MAX;
10553 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10554 focus_count);
10555 for (size_t i = 0; i < size; i++) {
10556 bool supt = false;
10557 size_t index = supported_indexes[i];
10558 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10559 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10560 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10561 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10562 overridesTable[index].awb_mode);
10563 if (NAME_NOT_FOUND != val) {
10564 overridesList[j+1] = (uint8_t)val;
10565 }
10566 uint8_t focus_override = overridesTable[index].af_mode;
10567 for (size_t k = 0; k < focus_count; k++) {
10568 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10569 supt = true;
10570 break;
10571 }
10572 }
10573 if (supt) {
10574 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10575 focus_override);
10576 if (NAME_NOT_FOUND != val) {
10577 overridesList[j+2] = (uint8_t)val;
10578 }
10579 } else {
10580 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10581 }
10582 j+=3;
10583 }
10584}
10585
10586/*===========================================================================
10587 * FUNCTION : filterJpegSizes
10588 *
10589 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10590 * the active pixel array can be downscaled to (by at most downscale_factor)
10591 *
10592 * PARAMETERS :
10593 *   @jpegSizes / @processedSizes : output and input flattened (width, height) size arrays; entries smaller than active_array_size / downscale_factor are dropped
10594 * RETURN : length of jpegSizes array
10595 *==========================================================================*/
10596
10597size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10598 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10599 uint8_t downscale_factor)
10600{
10601 if (0 == downscale_factor) {
10602 downscale_factor = 1;
10603 }
10604
10605 int32_t min_width = active_array_size.width / downscale_factor;
10606 int32_t min_height = active_array_size.height / downscale_factor;
10607 size_t jpegSizesCnt = 0;
10608 if (processedSizesCnt > maxCount) {
10609 processedSizesCnt = maxCount;
10610 }
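    // processedSizes is a flattened list of (width, height) pairs, hence the
    // stride of 2; only sizes that the active array can reach within the
    // allowed downscale factor are kept as JPEG sizes.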
10611 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10612 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10613 jpegSizes[jpegSizesCnt] = processedSizes[i];
10614 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10615 jpegSizesCnt += 2;
10616 }
10617 }
10618 return jpegSizesCnt;
10619}
10620
10621/*===========================================================================
10622 * FUNCTION : computeNoiseModelEntryS
10623 *
10624 * DESCRIPTION: function to map a given sensitivity to the S noise
10625 * model parameters in the DNG noise model.
10626 *
10627 * PARAMETERS : sens : the sensor sensitivity
10628 *
10629 * RETURN : S (sensor amplification) noise
10630 *
10631 *==========================================================================*/
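// In the DNG noise model (ANDROID_SENSOR_NOISE_PROFILE) the noise variance at
// signal level x is approximately S * x + O; S scales linearly with the
// requested sensitivity using the sensor's tuned gradient/offset pair.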
10632double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10633 double s = gCamCapability[mCameraId]->gradient_S * sens +
10634 gCamCapability[mCameraId]->offset_S;
10635 return ((s < 0.0) ? 0.0 : s);
10636}
10637
10638/*===========================================================================
10639 * FUNCTION : computeNoiseModelEntryO
10640 *
10641 * DESCRIPTION: function to map a given sensitivity to the O noise
10642 * model parameters in the DNG noise model.
10643 *
10644 * PARAMETERS : sens : the sensor sensitivity
10645 *
10646 * RETURN : O (sensor readout) noise
10647 *
10648 *==========================================================================*/
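// O grows quadratically with sensitivity; past the maximum analog sensitivity
// the extra gain is applied digitally, so the readout-noise term is further
// scaled by the square of that digital gain.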
10649double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10650 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10651 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10652 1.0 : (1.0 * sens / max_analog_sens);
10653 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10654 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10655 return ((o < 0.0) ? 0.0 : o);
10656}
10657
10658/*===========================================================================
10659 * FUNCTION : getSensorSensitivity
10660 *
10661 * DESCRIPTION: convert iso_mode to an integer value
10662 *
10663 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10664 *
10665 * RETURN : sensitivity supported by sensor
10666 *
10667 *==========================================================================*/
10668int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10669{
10670 int32_t sensitivity;
10671
10672 switch (iso_mode) {
10673 case CAM_ISO_MODE_100:
10674 sensitivity = 100;
10675 break;
10676 case CAM_ISO_MODE_200:
10677 sensitivity = 200;
10678 break;
10679 case CAM_ISO_MODE_400:
10680 sensitivity = 400;
10681 break;
10682 case CAM_ISO_MODE_800:
10683 sensitivity = 800;
10684 break;
10685 case CAM_ISO_MODE_1600:
10686 sensitivity = 1600;
10687 break;
10688 default:
10689 sensitivity = -1;
10690 break;
10691 }
10692 return sensitivity;
10693}
10694
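// Lazily powers on the Easel coprocessor (if present) and immediately suspends
// it so it stays in a low-power state until HDR+ actually needs it. Behavior
// is tunable through the camera.hdrplus.donotpoweroneasel and
// persist.camera.hdrplus.* properties read below.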
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010695int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010696 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010697 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10698 // to connect to Easel.
10699 bool doNotpowerOnEasel =
10700 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10701
10702 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010703 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10704 return OK;
10705 }
10706
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010707 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010708 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010709 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010710 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010711 return res;
10712 }
10713
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010714 EaselManagerClientOpened = true;
10715
10716 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010717 if (res != OK) {
10718 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10719 }
10720
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010721 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010722 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010723
10724 // Expose enableZsl key only when HDR+ mode is enabled.
10725 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010726 }
10727
10728 return OK;
10729}
10730
Thierry Strudel3d639192016-09-09 11:52:26 -070010731/*===========================================================================
10732 * FUNCTION : getCamInfo
10733 *
10734 * DESCRIPTION: query camera capabilities
10735 *
10736 * PARAMETERS :
10737 * @cameraId : camera Id
10738 * @info : camera info struct to be filled in with camera capabilities
10739 *
10740 * RETURN : int type of status
10741 * NO_ERROR -- success
10742 * non-zero failure code
10743 *==========================================================================*/
10744int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10745 struct camera_info *info)
10746{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010747 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010748 int rc = 0;
10749
10750 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010751
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010752 {
10753 Mutex::Autolock l(gHdrPlusClientLock);
10754 rc = initHdrPlusClientLocked();
10755 if (rc != OK) {
10756 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10757 pthread_mutex_unlock(&gCamLock);
10758 return rc;
10759 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010760 }
10761
Thierry Strudel3d639192016-09-09 11:52:26 -070010762 if (NULL == gCamCapability[cameraId]) {
10763 rc = initCapabilities(cameraId);
10764 if (rc < 0) {
10765 pthread_mutex_unlock(&gCamLock);
10766 return rc;
10767 }
10768 }
10769
10770 if (NULL == gStaticMetadata[cameraId]) {
10771 rc = initStaticMetadata(cameraId);
10772 if (rc < 0) {
10773 pthread_mutex_unlock(&gCamLock);
10774 return rc;
10775 }
10776 }
10777
10778 switch(gCamCapability[cameraId]->position) {
10779 case CAM_POSITION_BACK:
10780 case CAM_POSITION_BACK_AUX:
10781 info->facing = CAMERA_FACING_BACK;
10782 break;
10783
10784 case CAM_POSITION_FRONT:
10785 case CAM_POSITION_FRONT_AUX:
10786 info->facing = CAMERA_FACING_FRONT;
10787 break;
10788
10789 default:
10790 LOGE("Unknown position type %d for camera id:%d",
10791 gCamCapability[cameraId]->position, cameraId);
10792 rc = -1;
10793 break;
10794 }
10795
10796
10797 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010798#ifndef USE_HAL_3_3
10799 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10800#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010801 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010802#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010803 info->static_camera_characteristics = gStaticMetadata[cameraId];
10804
10805 //For now assume both cameras can operate independently.
10806 info->conflicting_devices = NULL;
10807 info->conflicting_devices_length = 0;
10808
10809 //resource cost is 100 * MIN(1.0, m/M),
10810 //where m is throughput requirement with maximum stream configuration
10811 //and M is CPP maximum throughput.
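    // Worked example with hypothetical numbers: three 12 MP processed streams
    // at 30 fps against a 3.6 Gpix/s CPP limit give a ratio of ~0.3, i.e. a
    // reported resource cost of 30.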
10812 float max_fps = 0.0;
10813 for (uint32_t i = 0;
10814 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10815 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10816 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10817 }
10818 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10819 gCamCapability[cameraId]->active_array_size.width *
10820 gCamCapability[cameraId]->active_array_size.height * max_fps /
10821 gCamCapability[cameraId]->max_pixel_bandwidth;
10822 info->resource_cost = 100 * MIN(1.0, ratio);
10823 LOGI("camera %d resource cost is %d", cameraId,
10824 info->resource_cost);
10825
10826 pthread_mutex_unlock(&gCamLock);
10827 return rc;
10828}
10829
10830/*===========================================================================
10831 * FUNCTION : translateCapabilityToMetadata
10832 *
10833 * DESCRIPTION: translate the capability into camera_metadata_t
10834 *
10835 * PARAMETERS : type of the request
10836 *
10837 *
10838 * RETURN : success: camera_metadata_t*
10839 * failure: NULL
10840 *
10841 *==========================================================================*/
10842camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10843{
10844 if (mDefaultMetadata[type] != NULL) {
10845 return mDefaultMetadata[type];
10846 }
10847 //first time we are handling this request
10848 //fill up the metadata structure using the wrapper class
10849 CameraMetadata settings;
10850 //translate from cam_capability_t to camera_metadata_tag_t
10851 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10852 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10853 int32_t defaultRequestID = 0;
10854 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10855
10856 /* OIS disable */
10857 char ois_prop[PROPERTY_VALUE_MAX];
10858 memset(ois_prop, 0, sizeof(ois_prop));
10859 property_get("persist.camera.ois.disable", ois_prop, "0");
10860 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10861
10862 /* Force video to use OIS */
10863 char videoOisProp[PROPERTY_VALUE_MAX];
10864 memset(videoOisProp, 0, sizeof(videoOisProp));
10865 property_get("persist.camera.ois.video", videoOisProp, "1");
10866 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010867
10868 // Hybrid AE enable/disable
10869 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10870 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10871 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10872 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10873
Thierry Strudel3d639192016-09-09 11:52:26 -070010874 uint8_t controlIntent = 0;
10875 uint8_t focusMode;
10876 uint8_t vsMode;
10877 uint8_t optStabMode;
10878 uint8_t cacMode;
10879 uint8_t edge_mode;
10880 uint8_t noise_red_mode;
10881 uint8_t tonemap_mode;
10882 bool highQualityModeEntryAvailable = FALSE;
10883 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010884 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010885 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10886 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010887 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010888 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010889 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010890
Thierry Strudel3d639192016-09-09 11:52:26 -070010891 switch (type) {
10892 case CAMERA3_TEMPLATE_PREVIEW:
10893 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10894 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10895 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10896 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10897 edge_mode = ANDROID_EDGE_MODE_FAST;
10898 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10899 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10900 break;
10901 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10902 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10903 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10904 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10905 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10906 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10907 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10908 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10909 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10910 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10911 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10912 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10913 highQualityModeEntryAvailable = TRUE;
10914 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10915 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10916 fastModeEntryAvailable = TRUE;
10917 }
10918 }
10919 if (highQualityModeEntryAvailable) {
10920 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10921 } else if (fastModeEntryAvailable) {
10922 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10923 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010924 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10925 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10926 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010927 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010928 break;
10929 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10930 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10931 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10932 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010933 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10934 edge_mode = ANDROID_EDGE_MODE_FAST;
10935 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10936 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10937 if (forceVideoOis)
10938 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10939 break;
10940 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10941 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10942 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10943 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010944 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10945 edge_mode = ANDROID_EDGE_MODE_FAST;
10946 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10947 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10948 if (forceVideoOis)
10949 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10950 break;
10951 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10952 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10953 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10954 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10955 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10956 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10957 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10958 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10959 break;
10960 case CAMERA3_TEMPLATE_MANUAL:
10961 edge_mode = ANDROID_EDGE_MODE_FAST;
10962 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10963 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10964 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10965 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10966 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10967 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10968 break;
10969 default:
10970 edge_mode = ANDROID_EDGE_MODE_FAST;
10971 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10972 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10973 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10974 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10975 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10976 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10977 break;
10978 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010979 // Set CAC to OFF if the underlying device doesn't support it
10980 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10981 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10982 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010983 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10984 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10985 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10986 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10987 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10988 }
10989 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010990 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010991 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010992
10993 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10994 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10995 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10996 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10997 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10998 || ois_disable)
10999 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11000 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011001 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011002
11003 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11004 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11005
11006 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11007 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11008
11009 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11010 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11011
11012 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11013 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11014
11015 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11016 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11017
11018 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11019 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11020
11021 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11022 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11023
11024 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11025 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11026
11027 /*flash*/
11028 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11029 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11030
11031 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11032 settings.update(ANDROID_FLASH_FIRING_POWER,
11033 &flashFiringLevel, 1);
11034
11035 /* lens */
11036 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11037 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11038
11039 if (gCamCapability[mCameraId]->filter_densities_count) {
11040 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11041 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11042 gCamCapability[mCameraId]->filter_densities_count);
11043 }
11044
11045 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11046 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11047
Thierry Strudel3d639192016-09-09 11:52:26 -070011048 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11049 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11050
11051 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11052 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11053
11054 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11055 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11056
11057 /* face detection (default to OFF) */
11058 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11059 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11060
Thierry Strudel54dc9782017-02-15 12:12:10 -080011061 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11062 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011063
11064 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11065 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11066
11067 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11068 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11069
Thierry Strudel3d639192016-09-09 11:52:26 -070011070
11071 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11072 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11073
11074 /* Exposure time (default to the minimum supported exposure time) */
11075 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11076 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11077
11078 /* frame duration */
11079 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11080 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11081
11082 /* sensitivity */
11083 static const int32_t default_sensitivity = 100;
11084 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011085#ifndef USE_HAL_3_3
11086 static const int32_t default_isp_sensitivity =
11087 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11088 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11089#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011090
11091 /*edge mode*/
11092 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11093
11094 /*noise reduction mode*/
11095 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11096
11097 /*color correction mode*/
11098 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11099 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11100
11101 /*tonemap mode*/
11102 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11103
11104 int32_t scaler_crop_region[4];
11105 scaler_crop_region[0] = 0;
11106 scaler_crop_region[1] = 0;
11107 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11108 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11109 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11110
11111 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11112 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11113
11114 /*focus distance*/
11115 float focus_distance = 0.0;
11116 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11117
11118 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011119 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011120 float max_range = 0.0;
11121 float max_fixed_fps = 0.0;
11122 int32_t fps_range[2] = {0, 0};
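    // For the preview/still/ZSL templates the widest variable range wins
    // (e.g. [15, 30] is preferred over [30, 30]); for the video templates the
    // highest fixed range (min == max) wins so recording runs at a steady rate.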
11123 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11124 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011125 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11126 TEMPLATE_MAX_PREVIEW_FPS) {
11127 continue;
11128 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011129 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11130 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11131 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11132 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11133 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11134 if (range > max_range) {
11135 fps_range[0] =
11136 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11137 fps_range[1] =
11138 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11139 max_range = range;
11140 }
11141 } else {
11142 if (range < 0.01 && max_fixed_fps <
11143 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11144 fps_range[0] =
11145 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11146 fps_range[1] =
11147 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11148 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11149 }
11150 }
11151 }
11152 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11153
11154 /*precapture trigger*/
11155 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11156 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11157
11158 /*af trigger*/
11159 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11160 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11161
11162 /* ae & af regions */
11163 int32_t active_region[] = {
11164 gCamCapability[mCameraId]->active_array_size.left,
11165 gCamCapability[mCameraId]->active_array_size.top,
11166 gCamCapability[mCameraId]->active_array_size.left +
11167 gCamCapability[mCameraId]->active_array_size.width,
11168 gCamCapability[mCameraId]->active_array_size.top +
11169 gCamCapability[mCameraId]->active_array_size.height,
11170 0};
11171 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11172 sizeof(active_region) / sizeof(active_region[0]));
11173 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11174 sizeof(active_region) / sizeof(active_region[0]));
11175
11176 /* black level lock */
11177 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11178 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11179
Thierry Strudel3d639192016-09-09 11:52:26 -070011180 //special defaults for manual template
11181 if (type == CAMERA3_TEMPLATE_MANUAL) {
11182 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11183 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11184
11185 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11186 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11187
11188 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11189 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11190
11191 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11192 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11193
11194 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11195 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11196
11197 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11198 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11199 }
11200
11201
11202 /* TNR
11203 * This is where we decide for which templates TNR is enabled by default.
11204 * TNR is turned on if either the preview or the video stream requires it.
11205 * This is not to be confused with per-stream linking; that decision is
11206 * still made per session and is handled as part of stream configuration.
11207 */
11208 uint8_t tnr_enable = 0;
11209
11210 if (m_bTnrPreview || m_bTnrVideo) {
11211
11212 switch (type) {
11213 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11214 tnr_enable = 1;
11215 break;
11216
11217 default:
11218 tnr_enable = 0;
11219 break;
11220 }
11221
11222 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11223 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11224 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11225
11226 LOGD("TNR:%d with process plate %d for template:%d",
11227 tnr_enable, tnr_process_type, type);
11228 }
11229
11230 //Update Link tags to default
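    // Defaults: the camera runs standalone (no dual-cam link), reports itself
    // as the main sensor, and the related camera id points back to itself.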
Shuzhen Wang920ea402017-05-03 08:49:39 -070011231 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011232 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11233
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011234 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011235 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11236
Shuzhen Wang920ea402017-05-03 08:49:39 -070011237 uint8_t related_camera_id = mCameraId;
11238 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011239
11240 /* CDS default */
11241 char prop[PROPERTY_VALUE_MAX];
11242 memset(prop, 0, sizeof(prop));
11243 property_get("persist.camera.CDS", prop, "Auto");
11244 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11245 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11246 if (CAM_CDS_MODE_MAX == cds_mode) {
11247 cds_mode = CAM_CDS_MODE_AUTO;
11248 }
11249
11250 /* Disabling CDS in templates which have TNR enabled*/
11251 if (tnr_enable)
11252 cds_mode = CAM_CDS_MODE_OFF;
11253
11254 int32_t mode = cds_mode;
11255 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011256
Thierry Strudel269c81a2016-10-12 12:13:59 -070011257 /* Manual Convergence AEC Speed is disabled by default*/
11258 float default_aec_speed = 0;
11259 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11260
11261 /* Manual Convergence AWB Speed is disabled by default*/
11262 float default_awb_speed = 0;
11263 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11264
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011265 // Set instant AEC to normal convergence by default
11266 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11267 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11268
Shuzhen Wang19463d72016-03-08 11:09:52 -080011269 /* hybrid ae */
11270 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11271
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011272 if (gExposeEnableZslKey) {
11273 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11274 }
11275
Thierry Strudel3d639192016-09-09 11:52:26 -070011276 mDefaultMetadata[type] = settings.release();
11277
11278 return mDefaultMetadata[type];
11279}
11280
11281/*===========================================================================
11282 * FUNCTION : setFrameParameters
11283 *
11284 * DESCRIPTION: set parameters per frame as requested in the metadata from
11285 * framework
11286 *
11287 * PARAMETERS :
11288 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011289 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011290 * @blob_request: Whether this request is a blob request or not
11291 *
11292 * RETURN : success: NO_ERROR
11293 * failure:
11294 *==========================================================================*/
11295int QCamera3HardwareInterface::setFrameParameters(
11296 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011297 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011298 int blob_request,
11299 uint32_t snapshotStreamId)
11300{
11301 /*translate from camera_metadata_t type to parm_type_t*/
11302 int rc = 0;
11303 int32_t hal_version = CAM_HAL_V3;
11304
11305 clear_metadata_buffer(mParameters);
11306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11307 LOGE("Failed to set hal version in the parameters");
11308 return BAD_VALUE;
11309 }
11310
11311 /*we need to update the frame number in the parameters*/
11312 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11313 request->frame_number)) {
11314 LOGE("Failed to set the frame number in the parameters");
11315 return BAD_VALUE;
11316 }
11317
11318 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011320 LOGE("Failed to set stream type mask in the parameters");
11321 return BAD_VALUE;
11322 }
11323
11324 if (mUpdateDebugLevel) {
11325 uint32_t dummyDebugLevel = 0;
11326 /* The value of dummyDebugLevel is irrelevant. On
11327 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11328 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11329 dummyDebugLevel)) {
11330 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11331 return BAD_VALUE;
11332 }
11333 mUpdateDebugLevel = false;
11334 }
11335
11336 if(request->settings != NULL){
11337 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11338 if (blob_request)
11339 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11340 }
11341
11342 return rc;
11343}
11344
11345/*===========================================================================
11346 * FUNCTION : setReprocParameters
11347 *
11348 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11349 * return it.
11350 *
11351 * PARAMETERS :
11352 * @request : request that needs to be serviced
11353 *
11354 * RETURN : success: NO_ERROR
11355 * failure:
11356 *==========================================================================*/
11357int32_t QCamera3HardwareInterface::setReprocParameters(
11358 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11359 uint32_t snapshotStreamId)
11360{
11361 /*translate from camera_metadata_t type to parm_type_t*/
11362 int rc = 0;
11363
11364 if (NULL == request->settings){
11365 LOGE("Reprocess settings cannot be NULL");
11366 return BAD_VALUE;
11367 }
11368
11369 if (NULL == reprocParam) {
11370 LOGE("Invalid reprocessing metadata buffer");
11371 return BAD_VALUE;
11372 }
11373 clear_metadata_buffer(reprocParam);
11374
11375 /*we need to update the frame number in the parameters*/
11376 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11377 request->frame_number)) {
11378 LOGE("Failed to set the frame number in the parameters");
11379 return BAD_VALUE;
11380 }
11381
11382 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11383 if (rc < 0) {
11384 LOGE("Failed to translate reproc request");
11385 return rc;
11386 }
11387
11388 CameraMetadata frame_settings;
11389 frame_settings = request->settings;
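    // The crop/ROI vendor tags below carry the crop computed for the matching
    // output stream of the original capture and are applied here to the
    // reprocess stream.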
11390 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11391 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11392 int32_t *crop_count =
11393 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11394 int32_t *crop_data =
11395 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11396 int32_t *roi_map =
11397 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11398 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11399 cam_crop_data_t crop_meta;
11400 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11401 crop_meta.num_of_streams = 1;
11402 crop_meta.crop_info[0].crop.left = crop_data[0];
11403 crop_meta.crop_info[0].crop.top = crop_data[1];
11404 crop_meta.crop_info[0].crop.width = crop_data[2];
11405 crop_meta.crop_info[0].crop.height = crop_data[3];
11406
11407 crop_meta.crop_info[0].roi_map.left =
11408 roi_map[0];
11409 crop_meta.crop_info[0].roi_map.top =
11410 roi_map[1];
11411 crop_meta.crop_info[0].roi_map.width =
11412 roi_map[2];
11413 crop_meta.crop_info[0].roi_map.height =
11414 roi_map[3];
11415
11416 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11417 rc = BAD_VALUE;
11418 }
11419 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11420 request->input_buffer->stream,
11421 crop_meta.crop_info[0].crop.left,
11422 crop_meta.crop_info[0].crop.top,
11423 crop_meta.crop_info[0].crop.width,
11424 crop_meta.crop_info[0].crop.height);
11425 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11426 request->input_buffer->stream,
11427 crop_meta.crop_info[0].roi_map.left,
11428 crop_meta.crop_info[0].roi_map.top,
11429 crop_meta.crop_info[0].roi_map.width,
11430 crop_meta.crop_info[0].roi_map.height);
11431 } else {
11432 LOGE("Invalid reprocess crop count %d!", *crop_count);
11433 }
11434 } else {
11435 LOGE("No crop data from matching output stream");
11436 }
11437
11438 /* These settings are not needed for regular requests so handle them specially for
11439 reprocess requests; information needed for EXIF tags */
11440 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11441 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11442 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11443 if (NAME_NOT_FOUND != val) {
11444 uint32_t flashMode = (uint32_t)val;
11445 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11446 rc = BAD_VALUE;
11447 }
11448 } else {
11449 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11450 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11451 }
11452 } else {
11453 LOGH("No flash mode in reprocess settings");
11454 }
11455
11456 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11457 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11459 rc = BAD_VALUE;
11460 }
11461 } else {
11462 LOGH("No flash state in reprocess settings");
11463 }
11464
11465 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11466 uint8_t *reprocessFlags =
11467 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11468 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11469 *reprocessFlags)) {
11470 rc = BAD_VALUE;
11471 }
11472 }
11473
Thierry Strudel54dc9782017-02-15 12:12:10 -080011474 // Add exif debug data to internal metadata
11475 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11476 mm_jpeg_debug_exif_params_t *debug_params =
11477 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11478 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11479 // AE
11480 if (debug_params->ae_debug_params_valid == TRUE) {
11481 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11482 debug_params->ae_debug_params);
11483 }
11484 // AWB
11485 if (debug_params->awb_debug_params_valid == TRUE) {
11486 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11487 debug_params->awb_debug_params);
11488 }
11489 // AF
11490 if (debug_params->af_debug_params_valid == TRUE) {
11491 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11492 debug_params->af_debug_params);
11493 }
11494 // ASD
11495 if (debug_params->asd_debug_params_valid == TRUE) {
11496 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11497 debug_params->asd_debug_params);
11498 }
11499 // Stats
11500 if (debug_params->stats_debug_params_valid == TRUE) {
11501 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11502 debug_params->stats_debug_params);
11503 }
11504 // BE Stats
11505 if (debug_params->bestats_debug_params_valid == TRUE) {
11506 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11507 debug_params->bestats_debug_params);
11508 }
11509 // BHIST
11510 if (debug_params->bhist_debug_params_valid == TRUE) {
11511 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11512 debug_params->bhist_debug_params);
11513 }
11514 // 3A Tuning
11515 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11516 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11517 debug_params->q3a_tuning_debug_params);
11518 }
11519 }
11520
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011521 // Add metadata which reprocess needs
11522 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11523 cam_reprocess_info_t *repro_info =
11524 (cam_reprocess_info_t *)frame_settings.find
11525 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011526 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011527 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011528 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011529 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011530 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011531 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011532 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011533 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011534 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011535 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011536 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011537 repro_info->pipeline_flip);
11538 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11539 repro_info->af_roi);
11540 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11541 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011542 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11543 CAM_INTF_PARM_ROTATION metadata has already been added in
11544 translateToHalMetadata and HAL must keep that new rotation
11545 metadata. Otherwise, the old rotation info saved in the vendor tag
11546 is used */
11547 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11548 CAM_INTF_PARM_ROTATION, reprocParam) {
11549 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11550 } else {
11551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011552 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011553 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011554 }
11555
11556 /* Add additional JPEG cropping information. The app sets QCAMERA3_JPEG_ENCODE_CROP_RECT
11557 to request cropping, and the ROI is used for downscale/upscale during HW JPEG encoding.
11558 roi.width and roi.height become the final JPEG size.
11559 For now, HAL only checks this for reprocess requests */
11560 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11561 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11562 uint8_t *enable =
11563 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11564 if (*enable == TRUE) {
11565 int32_t *crop_data =
11566 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11567 cam_stream_crop_info_t crop_meta;
11568 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11569 crop_meta.stream_id = 0;
11570 crop_meta.crop.left = crop_data[0];
11571 crop_meta.crop.top = crop_data[1];
11572 crop_meta.crop.width = crop_data[2];
11573 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011574 // The JPEG crop roi should match cpp output size
11575 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11576 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11577 crop_meta.roi_map.left = 0;
11578 crop_meta.roi_map.top = 0;
11579 crop_meta.roi_map.width = cpp_crop->crop.width;
11580 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011581 }
11582 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11583 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011584 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011585 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011586 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11587 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011588 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011589 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11590
11591 // Add JPEG scale information
11592 cam_dimension_t scale_dim;
11593 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11594 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11595 int32_t *roi =
11596 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11597 scale_dim.width = roi[2];
11598 scale_dim.height = roi[3];
11599 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11600 scale_dim);
11601 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11602 scale_dim.width, scale_dim.height, mCameraId);
11603 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011604 }
11605 }
11606
11607 return rc;
11608}
11609
11610/*===========================================================================
11611 * FUNCTION : saveRequestSettings
11612 *
11613 * DESCRIPTION: Add any settings that might have changed to the request settings
11614 * and save the settings to be applied on the frame
11615 *
11616 * PARAMETERS :
11617 * @jpegMetadata : the extracted and/or modified jpeg metadata
11618 * @request : request with initial settings
11619 *
11620 * RETURN :
11621 * camera_metadata_t* : pointer to the saved request settings
11622 *==========================================================================*/
11623camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11624 const CameraMetadata &jpegMetadata,
11625 camera3_capture_request_t *request)
11626{
11627 camera_metadata_t *resultMetadata;
11628 CameraMetadata camMetadata;
11629 camMetadata = request->settings;
11630
11631 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11632 int32_t thumbnail_size[2];
11633 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11634 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11635 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11636 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11637 }
11638
11639 if (request->input_buffer != NULL) {
11640 uint8_t reprocessFlags = 1;
11641 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11642 (uint8_t*)&reprocessFlags,
11643 sizeof(reprocessFlags));
11644 }
11645
11646 resultMetadata = camMetadata.release();
11647 return resultMetadata;
11648}
11649
11650/*===========================================================================
11651 * FUNCTION : setHalFpsRange
11652 *
11653 * DESCRIPTION: set FPS range parameter
11654 *
11655 *
11656 * PARAMETERS :
11657 * @settings : Metadata from framework
11658 * @hal_metadata: Metadata buffer
11659 *
11660 *
11661 * RETURN : success: NO_ERROR
11662 * failure:
11663 *==========================================================================*/
11664int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11665 metadata_buffer_t *hal_metadata)
11666{
11667 int32_t rc = NO_ERROR;
11668 cam_fps_range_t fps_range;
11669 fps_range.min_fps = (float)
11670 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11671 fps_range.max_fps = (float)
11672 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11673 fps_range.video_min_fps = fps_range.min_fps;
11674 fps_range.video_max_fps = fps_range.max_fps;
11675
11676 LOGD("aeTargetFpsRange fps: [%f %f]",
11677 fps_range.min_fps, fps_range.max_fps);
11678 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11679 * follows:
11680 * ---------------------------------------------------------------|
11681 * Video stream is absent in configure_streams |
11682 * (Camcorder preview before the first video record |
11683 * ---------------------------------------------------------------|
11684 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11685 * | | | vid_min/max_fps|
11686 * ---------------------------------------------------------------|
11687 * NO | [ 30, 240] | 240 | [240, 240] |
11688 * |-------------|-------------|----------------|
11689 * | [240, 240] | 240 | [240, 240] |
11690 * ---------------------------------------------------------------|
11691 * Video stream is present in configure_streams |
11692 * ---------------------------------------------------------------|
11693 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11694 * | | | vid_min/max_fps|
11695 * ---------------------------------------------------------------|
11696 * NO | [ 30, 240] | 240 | [240, 240] |
11697 * (camcorder prev |-------------|-------------|----------------|
11698 * after video rec | [240, 240] | 240 | [240, 240] |
11699 * is stopped) | | | |
11700 * ---------------------------------------------------------------|
11701 * YES | [ 30, 240] | 240 | [240, 240] |
11702 * |-------------|-------------|----------------|
11703 * | [240, 240] | 240 | [240, 240] |
11704 * ---------------------------------------------------------------|
11705 * When Video stream is absent in configure_streams,
11706 * preview fps = sensor_fps / batchsize
11707 * Eg: for 240fps at batchSize 4, preview = 60fps
11708 * for 120fps at batchSize 4, preview = 30fps
11709 *
11710 * When video stream is present in configure_streams, preview fps is as per
11711 * the ratio of preview buffers to video buffers requested in process
11712 * capture request
11713 */
11714 mBatchSize = 0;
11715 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11716 fps_range.min_fps = fps_range.video_max_fps;
11717 fps_range.video_min_fps = fps_range.video_max_fps;
11718 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11719 fps_range.max_fps);
11720 if (NAME_NOT_FOUND != val) {
11721 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11723 return BAD_VALUE;
11724 }
11725
11726 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11727 /* If batchmode is currently in progress and the fps changes,
11728 * set the flag to restart the sensor */
11729 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11730 (mHFRVideoFps != fps_range.max_fps)) {
11731 mNeedSensorRestart = true;
11732 }
11733 mHFRVideoFps = fps_range.max_fps;
11734 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11735 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11736 mBatchSize = MAX_HFR_BATCH_SIZE;
11737 }
11738 }
11739 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11740
11741 }
11742 } else {
11743 /* HFR mode is session param in backend/ISP. This should be reset when
11744 * in non-HFR mode */
11745 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11746 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11747 return BAD_VALUE;
11748 }
11749 }
11750 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11751 return BAD_VALUE;
11752 }
11753 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11754 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11755 return rc;
11756}
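
/* Illustrative sketch (hypothetical helper, not part of the HAL build): restates
 * the batch-size arithmetic used by setHalFpsRange() for
 * CONSTRAINED_HIGH_SPEED_MODE, where preview fps = sensor fps / batch size
 * (e.g. 240 fps HFR with a 60 fps preview target gives a batch of 4). The
 * function name and parameters are illustrative stand-ins for
 * PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE. */
static inline uint32_t exampleComputeHfrBatchSize(float hfrMaxFps,
        float previewFpsForHfr, uint32_t maxHfrBatchSize)
{
    if (previewFpsForHfr <= 0.0f) {
        return 0;
    }
    // Same derivation as mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR above
    uint32_t batchSize = (uint32_t)(hfrMaxFps / previewFpsForHfr);
    if (batchSize > maxHfrBatchSize) {
        batchSize = maxHfrBatchSize;   // clamped like MAX_HFR_BATCH_SIZE
    }
    return batchSize;
}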
11757
11758/*===========================================================================
11759 * FUNCTION : translateToHalMetadata
11760 *
11761 * DESCRIPTION: read settings from the camera_metadata_t and convert them to parm_type_t
11762 *
11763 *
11764 * PARAMETERS :
11765 * @request : request sent from framework
11766 *
11767 *
11768 * RETURN : success: NO_ERROR
11769 * failure:
11770 *==========================================================================*/
11771int QCamera3HardwareInterface::translateToHalMetadata
11772 (const camera3_capture_request_t *request,
11773 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011774 uint32_t snapshotStreamId) {
11775 if (request == nullptr || hal_metadata == nullptr) {
11776 return BAD_VALUE;
11777 }
11778
11779 int64_t minFrameDuration = getMinFrameDuration(request);
11780
11781 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11782 minFrameDuration);
11783}
11784
11785int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11786 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11787 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11788
Thierry Strudel3d639192016-09-09 11:52:26 -070011789 int rc = 0;
11790 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011791 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011792
11793 /* Do not change the order of the following list unless you know what you are
11794 * doing.
11795 * The order is laid out in such a way that parameters in the front of the table
11796 * may be used to override the parameters later in the table. Examples are:
11797 * 1. META_MODE should precede AEC/AWB/AF MODE
11798 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11799 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11800 * 4. Any mode should precede its corresponding settings
11801 */
11802 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11803 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11805 rc = BAD_VALUE;
11806 }
11807 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11808 if (rc != NO_ERROR) {
11809 LOGE("extractSceneMode failed");
11810 }
11811 }
11812
11813 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11814 uint8_t fwk_aeMode =
11815 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11816 uint8_t aeMode;
11817 int32_t redeye;
11818
11819 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11820 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011821 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11822 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011823 } else {
11824 aeMode = CAM_AE_MODE_ON;
11825 }
11826 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11827 redeye = 1;
11828 } else {
11829 redeye = 0;
11830 }
11831
11832 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11833 fwk_aeMode);
11834 if (NAME_NOT_FOUND != val) {
11835 int32_t flashMode = (int32_t)val;
11836 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11837 }
11838
11839 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11840 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11841 rc = BAD_VALUE;
11842 }
11843 }
11844
11845 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11846 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11847 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11848 fwk_whiteLevel);
11849 if (NAME_NOT_FOUND != val) {
11850 uint8_t whiteLevel = (uint8_t)val;
11851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11852 rc = BAD_VALUE;
11853 }
11854 }
11855 }
11856
11857 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11858 uint8_t fwk_cacMode =
11859 frame_settings.find(
11860 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11861 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11862 fwk_cacMode);
11863 if (NAME_NOT_FOUND != val) {
11864 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11865 bool entryAvailable = FALSE;
11866 // Check whether the framework-set CAC mode is supported by the device
11867 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11868 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11869 entryAvailable = TRUE;
11870 break;
11871 }
11872 }
11873 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11874 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
11875 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11876 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
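            // Illustrative example: on a device that advertises only FAST, a
            // framework request for HIGH_QUALITY is downgraded to FAST below,
            // a request for FAST that the device does not list falls back to
            // OFF, and a device with no CAC modes at all always resolves to OFF.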
11877 if (entryAvailable == FALSE) {
11878 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11879 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11880 } else {
11881 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11882 // High is not supported and so set the FAST as spec say's underlying
11883 // device implementation can be the same for both modes.
11884 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11885 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11886 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
11887 // in order to avoid the fps drop due to high quality
11888 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11889 } else {
11890 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11891 }
11892 }
11893 }
11894 LOGD("Final cacMode is %d", cacMode);
11895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11896 rc = BAD_VALUE;
11897 }
11898 } else {
11899 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11900 }
11901 }
11902
Thierry Strudel2896d122017-02-23 19:18:03 -080011903 char af_value[PROPERTY_VALUE_MAX];
11904 property_get("persist.camera.af.infinity", af_value, "0");
11905
Jason Lee84ae9972017-02-24 13:24:24 -080011906 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011907 if (atoi(af_value) == 0) {
11908 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011909 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011910 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11911 fwk_focusMode);
11912 if (NAME_NOT_FOUND != val) {
11913 uint8_t focusMode = (uint8_t)val;
11914 LOGD("set focus mode %d", focusMode);
11915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11916 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11917 rc = BAD_VALUE;
11918 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011919 }
11920 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011921 } else {
11922 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11923 LOGE("Focus forced to infinity %d", focusMode);
11924 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11925 rc = BAD_VALUE;
11926 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011927 }
11928
Jason Lee84ae9972017-02-24 13:24:24 -080011929 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11930 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011931 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11932 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11933 focalDistance)) {
11934 rc = BAD_VALUE;
11935 }
11936 }
11937
11938 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11939 uint8_t fwk_antibandingMode =
11940 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11941 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11942 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11943 if (NAME_NOT_FOUND != val) {
11944 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011945 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11946 if (m60HzZone) {
11947 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11948 } else {
11949 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11950 }
11951 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011952 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11953 hal_antibandingMode)) {
11954 rc = BAD_VALUE;
11955 }
11956 }
11957 }
11958
11959 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11960 int32_t expCompensation = frame_settings.find(
11961 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11962 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11963 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11964 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11965 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011966 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11968 expCompensation)) {
11969 rc = BAD_VALUE;
11970 }
11971 }
11972
11973 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11974 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11975 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11980 rc = setHalFpsRange(frame_settings, hal_metadata);
11981 if (rc != NO_ERROR) {
11982 LOGE("setHalFpsRange failed");
11983 }
11984 }
11985
11986 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11987 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11988 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11989 rc = BAD_VALUE;
11990 }
11991 }
11992
11993 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11994 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11995 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11996 fwk_effectMode);
11997 if (NAME_NOT_FOUND != val) {
11998 uint8_t effectMode = (uint8_t)val;
11999 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12000 rc = BAD_VALUE;
12001 }
12002 }
12003 }
12004
12005 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12006 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12008 colorCorrectMode)) {
12009 rc = BAD_VALUE;
12010 }
12011 }
12012
12013 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12014 cam_color_correct_gains_t colorCorrectGains;
12015 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12016 colorCorrectGains.gains[i] =
12017 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12018 }
12019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12020 colorCorrectGains)) {
12021 rc = BAD_VALUE;
12022 }
12023 }
12024
12025 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12026 cam_color_correct_matrix_t colorCorrectTransform;
12027 cam_rational_type_t transform_elem;
12028 size_t num = 0;
12029 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12030 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12031 transform_elem.numerator =
12032 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12033 transform_elem.denominator =
12034 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12035 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12036 num++;
12037 }
12038 }
12039 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12040 colorCorrectTransform)) {
12041 rc = BAD_VALUE;
12042 }
12043 }
12044
12045 cam_trigger_t aecTrigger;
12046 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12047 aecTrigger.trigger_id = -1;
12048 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12049 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12050 aecTrigger.trigger =
12051 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12052 aecTrigger.trigger_id =
12053 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12055 aecTrigger)) {
12056 rc = BAD_VALUE;
12057 }
12058 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12059 aecTrigger.trigger, aecTrigger.trigger_id);
12060 }
12061
12062 /*af_trigger must come with a trigger id*/
12063 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12064 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12065 cam_trigger_t af_trigger;
12066 af_trigger.trigger =
12067 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12068 af_trigger.trigger_id =
12069 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12071 rc = BAD_VALUE;
12072 }
12073 LOGD("AfTrigger: %d AfTriggerID: %d",
12074 af_trigger.trigger, af_trigger.trigger_id);
12075 }
12076
12077 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12078 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12079 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12084 cam_edge_application_t edge_application;
12085 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012086
Thierry Strudel3d639192016-09-09 11:52:26 -070012087 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12088 edge_application.sharpness = 0;
12089 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012090 edge_application.sharpness =
12091 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12092 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12093 int32_t sharpness =
12094 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12095 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12096 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12097 LOGD("Setting edge mode sharpness %d", sharpness);
12098 edge_application.sharpness = sharpness;
12099 }
12100 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012101 }
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12103 rc = BAD_VALUE;
12104 }
12105 }
12106
12107 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12108 int32_t respectFlashMode = 1;
12109 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12110 uint8_t fwk_aeMode =
12111 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012112 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12113 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12114 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012115 respectFlashMode = 0;
12116 LOGH("AE Mode controls flash, ignore android.flash.mode");
12117 }
12118 }
12119 if (respectFlashMode) {
12120 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12121 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12122 LOGH("flash mode after mapping %d", val);
12123 // To check: CAM_INTF_META_FLASH_MODE usage
12124 if (NAME_NOT_FOUND != val) {
12125 uint8_t flashMode = (uint8_t)val;
12126 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12127 rc = BAD_VALUE;
12128 }
12129 }
12130 }
12131 }
12132
12133 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12134 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12135 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12136 rc = BAD_VALUE;
12137 }
12138 }
12139
12140 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12141 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12143 flashFiringTime)) {
12144 rc = BAD_VALUE;
12145 }
12146 }
12147
12148 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12149 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12151 hotPixelMode)) {
12152 rc = BAD_VALUE;
12153 }
12154 }
12155
12156 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12157 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12158 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12159 lensAperture)) {
12160 rc = BAD_VALUE;
12161 }
12162 }
12163
12164 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12165 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12166 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12167 filterDensity)) {
12168 rc = BAD_VALUE;
12169 }
12170 }
12171
12172 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12173 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12174 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12175 focalLength)) {
12176 rc = BAD_VALUE;
12177 }
12178 }
12179
12180 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12181 uint8_t optStabMode =
12182 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12184 optStabMode)) {
12185 rc = BAD_VALUE;
12186 }
12187 }
12188
12189 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12190 uint8_t videoStabMode =
12191 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12192 LOGD("videoStabMode from APP = %d", videoStabMode);
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12194 videoStabMode)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199
12200 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12201 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12203 noiseRedMode)) {
12204 rc = BAD_VALUE;
12205 }
12206 }
12207
12208 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12209 float reprocessEffectiveExposureFactor =
12210 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12212 reprocessEffectiveExposureFactor)) {
12213 rc = BAD_VALUE;
12214 }
12215 }
12216
12217 cam_crop_region_t scalerCropRegion;
12218 bool scalerCropSet = false;
12219 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12220 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12221 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12222 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12223 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12224
12225 // Map coordinate system from active array to sensor output.
12226 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12227 scalerCropRegion.width, scalerCropRegion.height);
12228
12229 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12230 scalerCropRegion)) {
12231 rc = BAD_VALUE;
12232 }
12233 scalerCropSet = true;
12234 }
12235
12236 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12237 int64_t sensorExpTime =
12238 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12239 LOGD("setting sensorExpTime %lld", sensorExpTime);
12240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12241 sensorExpTime)) {
12242 rc = BAD_VALUE;
12243 }
12244 }
12245
12246 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12247 int64_t sensorFrameDuration =
12248 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012249 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12250 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12251 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12252 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12254 sensorFrameDuration)) {
12255 rc = BAD_VALUE;
12256 }
12257 }
12258
12259 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12260 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12261 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12262 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12263 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12264 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12265 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12267 sensorSensitivity)) {
12268 rc = BAD_VALUE;
12269 }
12270 }
12271
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012272#ifndef USE_HAL_3_3
12273 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12274 int32_t ispSensitivity =
12275 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12276 if (ispSensitivity <
12277 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12278 ispSensitivity =
12279 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12280 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12281 }
12282 if (ispSensitivity >
12283 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12284 ispSensitivity =
12285 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12286 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12287 }
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12289 ispSensitivity)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293#endif
12294
Thierry Strudel3d639192016-09-09 11:52:26 -070012295 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12296 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12297 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12298 rc = BAD_VALUE;
12299 }
12300 }
12301
12302 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12303 uint8_t fwk_facedetectMode =
12304 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12305
12306 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12307 fwk_facedetectMode);
12308
12309 if (NAME_NOT_FOUND != val) {
12310 uint8_t facedetectMode = (uint8_t)val;
12311 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12312 facedetectMode)) {
12313 rc = BAD_VALUE;
12314 }
12315 }
12316 }
12317
Thierry Strudel54dc9782017-02-15 12:12:10 -080012318 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012319 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012320 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12322 histogramMode)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12328 uint8_t sharpnessMapMode =
12329 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12331 sharpnessMapMode)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335
12336 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12337 uint8_t tonemapMode =
12338 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12340 rc = BAD_VALUE;
12341 }
12342 }
12343 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12344 /*All tonemap channels will have the same number of points*/
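    /* Illustrative example: a 3-point green curve arrives from the framework as
     * the interleaved list {in0, out0, in1, out1, in2, out2} (count = 6 floats),
     * so the divide-by-two below yields tonemap_points_cnt = 3. */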
12345 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12346 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12347 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12348 cam_rgb_tonemap_curves tonemapCurves;
12349 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12350 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12351 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12352 tonemapCurves.tonemap_points_cnt,
12353 CAM_MAX_TONEMAP_CURVE_SIZE);
12354 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12355 }
12356
12357 /* ch0 = G*/
12358 size_t point = 0;
12359 cam_tonemap_curve_t tonemapCurveGreen;
12360 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12361 for (size_t j = 0; j < 2; j++) {
12362 tonemapCurveGreen.tonemap_points[i][j] =
12363 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12364 point++;
12365 }
12366 }
12367 tonemapCurves.curves[0] = tonemapCurveGreen;
12368
12369 /* ch 1 = B */
12370 point = 0;
12371 cam_tonemap_curve_t tonemapCurveBlue;
12372 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12373 for (size_t j = 0; j < 2; j++) {
12374 tonemapCurveBlue.tonemap_points[i][j] =
12375 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12376 point++;
12377 }
12378 }
12379 tonemapCurves.curves[1] = tonemapCurveBlue;
12380
12381 /* ch 2 = R */
12382 point = 0;
12383 cam_tonemap_curve_t tonemapCurveRed;
12384 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12385 for (size_t j = 0; j < 2; j++) {
12386 tonemapCurveRed.tonemap_points[i][j] =
12387 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12388 point++;
12389 }
12390 }
12391 tonemapCurves.curves[2] = tonemapCurveRed;
12392
12393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12394 tonemapCurves)) {
12395 rc = BAD_VALUE;
12396 }
12397 }
12398
12399 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12400 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12401 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12402 captureIntent)) {
12403 rc = BAD_VALUE;
12404 }
12405 }
12406
12407 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12408 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12410 blackLevelLock)) {
12411 rc = BAD_VALUE;
12412 }
12413 }
12414
12415 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12416 uint8_t lensShadingMapMode =
12417 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12419 lensShadingMapMode)) {
12420 rc = BAD_VALUE;
12421 }
12422 }
12423
12424 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12425 cam_area_t roi;
12426 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012427 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012428
12429 // Map coordinate system from active array to sensor output.
12430 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12431 roi.rect.height);
12432
12433 if (scalerCropSet) {
12434 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12435 }
12436 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12437 rc = BAD_VALUE;
12438 }
12439 }
12440
12441 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12442 cam_area_t roi;
12443 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012444 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012445
12446 // Map coordinate system from active array to sensor output.
12447 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12448 roi.rect.height);
12449
12450 if (scalerCropSet) {
12451 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12452 }
12453 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12454 rc = BAD_VALUE;
12455 }
12456 }
12457
12458 // CDS for non-HFR non-video mode
12459 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12460 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12461 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12462 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12463 LOGE("Invalid CDS mode %d!", *fwk_cds);
12464 } else {
12465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12466 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12467 rc = BAD_VALUE;
12468 }
12469 }
12470 }
12471
Thierry Strudel04e026f2016-10-10 11:27:36 -070012472 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012473 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012474 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012475 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12476 }
12477 if (m_bVideoHdrEnabled)
12478 vhdr = CAM_VIDEO_HDR_MODE_ON;
12479
Thierry Strudel54dc9782017-02-15 12:12:10 -080012480 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12481
12482 if(vhdr != curr_hdr_state)
12483 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12484
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012485 rc = setVideoHdrMode(mParameters, vhdr);
12486 if (rc != NO_ERROR) {
12487 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012488 }
12489
12490 //IR
12491 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12492 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12493 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012494 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12495 uint8_t isIRon = 0;
12496
12497 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012498 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12499 LOGE("Invalid IR mode %d!", fwk_ir);
12500 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012501 if(isIRon != curr_ir_state )
12502 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12503
Thierry Strudel04e026f2016-10-10 11:27:36 -070012504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12505 CAM_INTF_META_IR_MODE, fwk_ir)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509 }
12510
Thierry Strudel54dc9782017-02-15 12:12:10 -080012511 //Binning Correction Mode
12512 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12513 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12514 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12515 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12516 || (0 > fwk_binning_correction)) {
12517 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12518 } else {
12519 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12520 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12521 rc = BAD_VALUE;
12522 }
12523 }
12524 }
12525
Thierry Strudel269c81a2016-10-12 12:13:59 -070012526 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12527 float aec_speed;
12528 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12529 LOGD("AEC Speed :%f", aec_speed);
12530 if ( aec_speed < 0 ) {
12531 LOGE("Invalid AEC mode %f!", aec_speed);
12532 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12533 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12534 aec_speed)) {
12535 rc = BAD_VALUE;
12536 }
12537 }
12538 }
12539
12540 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12541 float awb_speed;
12542 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12543 LOGD("AWB Speed :%f", awb_speed);
12544 if ( awb_speed < 0 ) {
12545 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12546 } else {
12547 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12548 awb_speed)) {
12549 rc = BAD_VALUE;
12550 }
12551 }
12552 }
12553
Thierry Strudel3d639192016-09-09 11:52:26 -070012554 // TNR
12555 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12556 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12557 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012558 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012559 cam_denoise_param_t tnr;
12560 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12561 tnr.process_plates =
12562 (cam_denoise_process_type_t)frame_settings.find(
12563 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12564 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012565
12566 if(b_TnrRequested != curr_tnr_state)
12567 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12568
Thierry Strudel3d639192016-09-09 11:52:26 -070012569 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573
Thierry Strudel54dc9782017-02-15 12:12:10 -080012574 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012575 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012576 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012577 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12578 *exposure_metering_mode)) {
12579 rc = BAD_VALUE;
12580 }
12581 }
12582
Thierry Strudel3d639192016-09-09 11:52:26 -070012583 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12584 int32_t fwk_testPatternMode =
12585 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12586 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12587 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12588
12589 if (NAME_NOT_FOUND != testPatternMode) {
12590 cam_test_pattern_data_t testPatternData;
12591 memset(&testPatternData, 0, sizeof(testPatternData));
12592 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12593 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12594 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12595 int32_t *fwk_testPatternData =
12596 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12597 testPatternData.r = fwk_testPatternData[0];
12598 testPatternData.b = fwk_testPatternData[3];
12599 switch (gCamCapability[mCameraId]->color_arrangement) {
12600 case CAM_FILTER_ARRANGEMENT_RGGB:
12601 case CAM_FILTER_ARRANGEMENT_GRBG:
12602 testPatternData.gr = fwk_testPatternData[1];
12603 testPatternData.gb = fwk_testPatternData[2];
12604 break;
12605 case CAM_FILTER_ARRANGEMENT_GBRG:
12606 case CAM_FILTER_ARRANGEMENT_BGGR:
12607 testPatternData.gr = fwk_testPatternData[2];
12608 testPatternData.gb = fwk_testPatternData[1];
12609 break;
12610 default:
12611 LOGE("color arrangement %d is not supported",
12612 gCamCapability[mCameraId]->color_arrangement);
12613 break;
12614 }
12615 }
12616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12617 testPatternData)) {
12618 rc = BAD_VALUE;
12619 }
12620 } else {
12621 LOGE("Invalid framework sensor test pattern mode %d",
12622 fwk_testPatternMode);
12623 }
12624 }
12625
12626 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12627 size_t count = 0;
12628 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12629 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12630 gps_coords.data.d, gps_coords.count, count);
12631 if (gps_coords.count != count) {
12632 rc = BAD_VALUE;
12633 }
12634 }
12635
12636 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12637 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12638 size_t count = 0;
12639 const char *gps_methods_src = (const char *)
12640 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12641 memset(gps_methods, '\0', sizeof(gps_methods));
12642 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12643 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12644 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12645 if (GPS_PROCESSING_METHOD_SIZE != count) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649
12650 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12651 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12653 gps_timestamp)) {
12654 rc = BAD_VALUE;
12655 }
12656 }
12657
12658 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12659 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12660 cam_rotation_info_t rotation_info;
12661 if (orientation == 0) {
12662 rotation_info.rotation = ROTATE_0;
12663 } else if (orientation == 90) {
12664 rotation_info.rotation = ROTATE_90;
12665 } else if (orientation == 180) {
12666 rotation_info.rotation = ROTATE_180;
12667 } else if (orientation == 270) {
12668 rotation_info.rotation = ROTATE_270;
12669 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012670 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012671 rotation_info.streamId = snapshotStreamId;
12672 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12673 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12674 rc = BAD_VALUE;
12675 }
12676 }
12677
12678 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12679 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12680 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
12685 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12686 uint32_t thumb_quality = (uint32_t)
12687 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12688 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12689 thumb_quality)) {
12690 rc = BAD_VALUE;
12691 }
12692 }
12693
12694 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12695 cam_dimension_t dim;
12696 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12697 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12698 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12699 rc = BAD_VALUE;
12700 }
12701 }
12702
12703 // Internal metadata
12704 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12705 size_t count = 0;
12706 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12707 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12708 privatedata.data.i32, privatedata.count, count);
12709 if (privatedata.count != count) {
12710 rc = BAD_VALUE;
12711 }
12712 }
12713
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012714 // ISO/Exposure Priority
12715 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12716 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12717 cam_priority_mode_t mode =
12718 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12719 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12720 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12721 use_iso_exp_pty.previewOnly = FALSE;
12722 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12723 use_iso_exp_pty.value = *ptr;
12724
12725 if(CAM_ISO_PRIORITY == mode) {
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12727 use_iso_exp_pty)) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731 else {
12732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12733 use_iso_exp_pty)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012737
12738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12739 rc = BAD_VALUE;
12740 }
12741 }
12742 } else {
12743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12744 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012745 }
12746 }
12747
12748 // Saturation
12749 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12750 int32_t* use_saturation =
12751 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
Thierry Strudel3d639192016-09-09 11:52:26 -070012757 // EV step
12758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12759 gCamCapability[mCameraId]->exp_compensation_step)) {
12760 rc = BAD_VALUE;
12761 }
12762
12763 // CDS info
12764 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12765 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12766 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12767
12768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12769 CAM_INTF_META_CDS_DATA, *cdsData)) {
12770 rc = BAD_VALUE;
12771 }
12772 }
12773
Shuzhen Wang19463d72016-03-08 11:09:52 -080012774 // Hybrid AE
12775 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12776 uint8_t *hybrid_ae = (uint8_t *)
12777 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12778
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12780 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784
Shuzhen Wang14415f52016-11-16 18:26:18 -080012785 // Histogram
12786 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12787 uint8_t histogramMode =
12788 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12790 histogramMode)) {
12791 rc = BAD_VALUE;
12792 }
12793 }
12794
12795 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12796 int32_t histogramBins =
12797 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12799 histogramBins)) {
12800 rc = BAD_VALUE;
12801 }
12802 }
12803
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012804 // Tracking AF
12805 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12806 uint8_t trackingAfTrigger =
12807 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12808 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12809 trackingAfTrigger)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813
Thierry Strudel3d639192016-09-09 11:52:26 -070012814 return rc;
12815}
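
/* Illustrative sketch (hypothetical, not part of the HAL build): most entries in
 * translateFwkMetadataToHalMetadata() follow one pattern - check that the
 * framework supplied the tag, map the framework enum to the HAL enum through a
 * lookup table (see lookupHalName), and add the result to the HAL metadata
 * batch. The table layout and function below are simplified stand-ins, not the
 * real *_MODES_MAP tables or the metadata_buffer_t API. */
static inline int exampleLookupHalEnum(const int (*map)[2], size_t mapSize,
        int fwkValue)
{
    for (size_t i = 0; i < mapSize; i++) {
        if (map[i][0] == fwkValue) {
            return map[i][1];          // framework value -> HAL value
        }
    }
    return -1;                         // analogous to NAME_NOT_FOUND
}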
12816
12817/*===========================================================================
12818 * FUNCTION : captureResultCb
12819 *
12820 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12821 *
12822 * PARAMETERS :
12823 * @frame : frame information from mm-camera-interface
12824 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12825 * @userdata: userdata
12826 *
12827 * RETURN : NONE
12828 *==========================================================================*/
12829void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12830 camera3_stream_buffer_t *buffer,
12831 uint32_t frame_number, bool isInputBuffer, void *userdata)
12832{
12833 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12834 if (hw == NULL) {
12835 LOGE("Invalid hw %p", hw);
12836 return;
12837 }
12838
12839 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12840 return;
12841}
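
/* Illustrative sketch (hypothetical types, not part of the HAL build): the
 * static captureResultCb() above is a C-callback trampoline - the camera
 * interface only carries an opaque void *userdata, so the static function casts
 * it back to the QCamera3HardwareInterface instance and forwards to the member
 * overload. The minimal pattern looks like this: */
namespace example_trampoline {
struct Handler {
    void onEvent(int code) { (void)code; /* instance-side handling */ }
    static void trampoline(int code, void *userdata) {
        Handler *self = static_cast<Handler *>(userdata);
        if (self == nullptr) {
            return;                    // mirrors the NULL userdata check above
        }
        self->onEvent(code);
    }
};
} // namespace example_trampoline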
12842
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012843/*===========================================================================
12844 * FUNCTION : setBufferErrorStatus
12845 *
12846 * DESCRIPTION: Callback handler for channels to report any buffer errors
12847 *
12848 * PARAMETERS :
12849 * @ch : Channel on which buffer error is reported from
12850 * @frame_number : frame number on which buffer error is reported on
12851 * @buffer_status : buffer error status
12852 * @userdata: userdata
12853 *
12854 * RETURN : NONE
12855 *==========================================================================*/
12856void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12857 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12858{
12859 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12860 if (hw == NULL) {
12861 LOGE("Invalid hw %p", hw);
12862 return;
12863 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012864
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012865 hw->setBufferErrorStatus(ch, frame_number, err);
12866 return;
12867}
12868
12869void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12870 uint32_t frameNumber, camera3_buffer_status_t err)
12871{
12872 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12873 pthread_mutex_lock(&mMutex);
12874
12875 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12876 if (req.frame_number != frameNumber)
12877 continue;
12878 for (auto& k : req.mPendingBufferList) {
12879 if(k.stream->priv == ch) {
12880 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12881 }
12882 }
12883 }
12884
12885 pthread_mutex_unlock(&mMutex);
12886 return;
12887}
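
/* Illustrative sketch (hypothetical types, not part of the HAL build): the
 * member setBufferErrorStatus() above walks the pending-buffer bookkeeping
 * under mMutex and flags every buffer of the erring channel in the matching
 * frame as CAMERA3_BUFFER_STATUS_ERROR. The same walk on simplified data: */
struct ExamplePendingBuffer {
    uint32_t frameNumber;
    const void *channel;               // stands in for stream->priv
    bool hasError;
};

static inline void exampleMarkBufferError(ExamplePendingBuffer *buffers,
        size_t count, uint32_t frameNumber, const void *channel)
{
    // The real implementation performs this scan while holding mMutex.
    for (size_t i = 0; i < count; i++) {
        if (buffers[i].frameNumber == frameNumber &&
                buffers[i].channel == channel) {
            buffers[i].hasError = true;
        }
    }
}
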
Thierry Strudel3d639192016-09-09 11:52:26 -070012888/*===========================================================================
12889 * FUNCTION : initialize
12890 *
12891 * DESCRIPTION: Pass framework callback pointers to HAL
12892 *
12893 * PARAMETERS :
12894 *
12895 *
12896 * RETURN : Success : 0
12897 * Failure: -ENODEV
12898 *==========================================================================*/
12899
12900int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12901 const camera3_callback_ops_t *callback_ops)
12902{
12903 LOGD("E");
12904 QCamera3HardwareInterface *hw =
12905 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12906 if (!hw) {
12907 LOGE("NULL camera device");
12908 return -ENODEV;
12909 }
12910
12911 int rc = hw->initialize(callback_ops);
12912 LOGD("X");
12913 return rc;
12914}
12915
12916/*===========================================================================
12917 * FUNCTION : configure_streams
12918 *
12919 * DESCRIPTION:
12920 *
12921 * PARAMETERS :
12922 *
12923 *
12924 * RETURN : Success: 0
12925 * Failure: -EINVAL (if stream configuration is invalid)
12926 * -ENODEV (fatal error)
12927 *==========================================================================*/
12928
12929int QCamera3HardwareInterface::configure_streams(
12930 const struct camera3_device *device,
12931 camera3_stream_configuration_t *stream_list)
12932{
12933 LOGD("E");
12934 QCamera3HardwareInterface *hw =
12935 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12936 if (!hw) {
12937 LOGE("NULL camera device");
12938 return -ENODEV;
12939 }
12940 int rc = hw->configureStreams(stream_list);
12941 LOGD("X");
12942 return rc;
12943}
12944
12945/*===========================================================================
12946 * FUNCTION : construct_default_request_settings
12947 *
12948 * DESCRIPTION: Configure a settings buffer to meet the required use case
12949 *
12950 * PARAMETERS :
12951 *
12952 *
12953 * RETURN : Success: Return valid metadata
12954 * Failure: Return NULL
12955 *==========================================================================*/
12956const camera_metadata_t* QCamera3HardwareInterface::
12957 construct_default_request_settings(const struct camera3_device *device,
12958 int type)
12959{
12960
12961 LOGD("E");
12962 camera_metadata_t* fwk_metadata = NULL;
12963 QCamera3HardwareInterface *hw =
12964 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12965 if (!hw) {
12966 LOGE("NULL camera device");
12967 return NULL;
12968 }
12969
12970 fwk_metadata = hw->translateCapabilityToMetadata(type);
12971
12972 LOGD("X");
12973 return fwk_metadata;
12974}
12975
12976/*===========================================================================
12977 * FUNCTION : process_capture_request
12978 *
12979 * DESCRIPTION:
12980 *
12981 * PARAMETERS :
12982 *
12983 *
12984 * RETURN :
12985 *==========================================================================*/
12986int QCamera3HardwareInterface::process_capture_request(
12987 const struct camera3_device *device,
12988 camera3_capture_request_t *request)
12989{
12990 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012991 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012992 QCamera3HardwareInterface *hw =
12993 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12994 if (!hw) {
12995 LOGE("NULL camera device");
12996 return -EINVAL;
12997 }
12998
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012999 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013000 LOGD("X");
13001 return rc;
13002}
13003
13004/*===========================================================================
13005 * FUNCTION : dump
13006 *
13007 * DESCRIPTION:
13008 *
13009 * PARAMETERS :
13010 *
13011 *
13012 * RETURN :
13013 *==========================================================================*/
13014
13015void QCamera3HardwareInterface::dump(
13016 const struct camera3_device *device, int fd)
13017{
13018 /* Log level property is read when "adb shell dumpsys media.camera" is
13019 called so that the log level can be controlled without restarting
13020 the media server */
13021 getLogLevel();
13022
13023 LOGD("E");
13024 QCamera3HardwareInterface *hw =
13025 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13026 if (!hw) {
13027 LOGE("NULL camera device");
13028 return;
13029 }
13030
13031 hw->dump(fd);
13032 LOGD("X");
13033 return;
13034}
13035
13036/*===========================================================================
13037 * FUNCTION : flush
13038 *
13039 * DESCRIPTION:
13040 *
13041 * PARAMETERS :
13042 *
13043 *
13044 * RETURN :
13045 *==========================================================================*/
13046
13047int QCamera3HardwareInterface::flush(
13048 const struct camera3_device *device)
13049{
13050 int rc;
13051 LOGD("E");
13052 QCamera3HardwareInterface *hw =
13053 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13054 if (!hw) {
13055 LOGE("NULL camera device");
13056 return -EINVAL;
13057 }
13058
13059 pthread_mutex_lock(&hw->mMutex);
13060 // Validate current state
13061 switch (hw->mState) {
13062 case STARTED:
13063 /* valid state */
13064 break;
13065
13066 case ERROR:
13067 pthread_mutex_unlock(&hw->mMutex);
13068 hw->handleCameraDeviceError();
13069 return -ENODEV;
13070
13071 default:
13072 LOGI("Flush returned during state %d", hw->mState);
13073 pthread_mutex_unlock(&hw->mMutex);
13074 return 0;
13075 }
13076 pthread_mutex_unlock(&hw->mMutex);
13077
13078 rc = hw->flush(true /* restart channels */ );
13079 LOGD("X");
13080 return rc;
13081}
13082
13083/*===========================================================================
13084 * FUNCTION : close_camera_device
13085 *
13086 * DESCRIPTION:
13087 *
13088 * PARAMETERS :
13089 *
13090 *
13091 * RETURN :
13092 *==========================================================================*/
13093int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13094{
13095 int ret = NO_ERROR;
13096 QCamera3HardwareInterface *hw =
13097 reinterpret_cast<QCamera3HardwareInterface *>(
13098 reinterpret_cast<camera3_device_t *>(device)->priv);
13099 if (!hw) {
13100 LOGE("NULL camera device");
13101 return BAD_VALUE;
13102 }
13103
13104 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13105 delete hw;
13106 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013107 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013108 return ret;
13109}
13110
13111/*===========================================================================
13112 * FUNCTION : getWaveletDenoiseProcessPlate
13113 *
13114 * DESCRIPTION: query wavelet denoise process plate
13115 *
13116 * PARAMETERS : None
13117 *
13118 * RETURN : WNR process plate value
13119 *==========================================================================*/
13120cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13121{
13122 char prop[PROPERTY_VALUE_MAX];
13123 memset(prop, 0, sizeof(prop));
13124 property_get("persist.denoise.process.plates", prop, "0");
13125 int processPlate = atoi(prop);
13126 switch(processPlate) {
13127 case 0:
13128 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13129 case 1:
13130 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13131 case 2:
13132 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13133 case 3:
13134 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13135 default:
13136 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13137 }
13138}
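
/* Usage note (illustrative): the plate selection above is driven by an Android
 * system property, so it can be overridden at runtime for debugging, e.g.
 * "adb shell setprop persist.denoise.process.plates 2" selects
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR on the next query. */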
13139
13140
13141/*===========================================================================
13142 * FUNCTION : getTemporalDenoiseProcessPlate
13143 *
13144 * DESCRIPTION: query temporal denoise process plate
13145 *
13146 * PARAMETERS : None
13147 *
13148 * RETURN : TNR process plate value
13149 *==========================================================================*/
13150cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13151{
13152 char prop[PROPERTY_VALUE_MAX];
13153 memset(prop, 0, sizeof(prop));
13154 property_get("persist.tnr.process.plates", prop, "0");
13155 int processPlate = atoi(prop);
13156 switch(processPlate) {
13157 case 0:
13158 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13159 case 1:
13160 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13161 case 2:
13162 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13163 case 3:
13164 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13165 default:
13166 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13167 }
13168}
13169
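/*===========================================================================
 * EXAMPLE (illustrative sketch only, not compiled into the HAL): both plate
 * getters above map an integer system property directly onto
 * cam_denoise_process_type_t (0 -> YCbCr plane, 1 -> CbCr only,
 * 2/default -> streamlined YCbCr, 3 -> streamlined CbCr). Assuming a
 * QCamera3HardwareInterface instance named `hw`:
 *
 *   // adb shell setprop persist.denoise.process.plates 2
 *   // adb shell setprop persist.tnr.process.plates 0
 *   cam_denoise_process_type_t wnrPlate = hw->getWaveletDenoiseProcessPlate();
 *   cam_denoise_process_type_t tnrPlate = hw->getTemporalDenoiseProcessPlate();
 *   // wnrPlate == CAM_WAVELET_DENOISE_STREAMLINE_YCBCR
 *   // tnrPlate == CAM_WAVELET_DENOISE_YCBCR_PLANE
 *==========================================================================*/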
13170
13171/*===========================================================================
13172 * FUNCTION : extractSceneMode
13173 *
13174 * DESCRIPTION: Extract scene mode from frameworks set metadata
13175 *
13176 * PARAMETERS :
13177 * @frame_settings: CameraMetadata reference
13178 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13179 * @hal_metadata: hal metadata structure
13180 *
13181 * RETURN     : NO_ERROR on success, error code on failure
13182 *==========================================================================*/
13183int32_t QCamera3HardwareInterface::extractSceneMode(
13184 const CameraMetadata &frame_settings, uint8_t metaMode,
13185 metadata_buffer_t *hal_metadata)
13186{
13187 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013188 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13189
13190 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13191 LOGD("Ignoring control mode OFF_KEEP_STATE");
13192 return NO_ERROR;
13193 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013194
13195 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13196 camera_metadata_ro_entry entry =
13197 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13198 if (0 == entry.count)
13199 return rc;
13200
13201 uint8_t fwk_sceneMode = entry.data.u8[0];
13202
13203 int val = lookupHalName(SCENE_MODES_MAP,
13204 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13205 fwk_sceneMode);
13206 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013207 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013208 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013209 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013210 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013211
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013212 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13213 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13214 }
13215
13216 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13217        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013218 cam_hdr_param_t hdr_params;
13219 hdr_params.hdr_enable = 1;
13220 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13221 hdr_params.hdr_need_1x = false;
13222 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13223 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13224 rc = BAD_VALUE;
13225 }
13226 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013227
Thierry Strudel3d639192016-09-09 11:52:26 -070013228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13229 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13230 rc = BAD_VALUE;
13231 }
13232 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013233
13234 if (mForceHdrSnapshot) {
13235 cam_hdr_param_t hdr_params;
13236 hdr_params.hdr_enable = 1;
13237 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13238 hdr_params.hdr_need_1x = false;
13239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13240 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13241 rc = BAD_VALUE;
13242 }
13243 }
13244
Thierry Strudel3d639192016-09-09 11:52:26 -070013245 return rc;
13246}
13247
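/*===========================================================================
 * EXAMPLE (illustrative sketch, not part of the HAL): framework metadata that
 * drives extractSceneMode() down its HDR path. Only standard android.control
 * keys are used; wiring the settings into a capture request is assumed.
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *   // extractSceneMode() maps the framework value to CAM_SCENE_MODE_HDR via
 *   // SCENE_MODES_MAP, forwards it as CAM_INTF_PARM_BESTSHOT_MODE, and
 *   // programs the HDR bracketing / sensor HDR parameters as shown above.
 *==========================================================================*/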
13248/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013249 * FUNCTION : setVideoHdrMode
13250 *
13251 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13252 *
13253 * PARAMETERS :
13254 * @hal_metadata: hal metadata structure
13255 *   @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE vendor tag value)
13256 *
13257 * RETURN     : NO_ERROR on success, BAD_VALUE for an invalid mode
13258 *==========================================================================*/
13259int32_t QCamera3HardwareInterface::setVideoHdrMode(
13260 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13261{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013262 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13263 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13264 }
13265
13266 LOGE("Invalid Video HDR mode %d!", vhdr);
13267 return BAD_VALUE;
13268}
13269
13270/*===========================================================================
13271 * FUNCTION : setSensorHDR
13272 *
13273 * DESCRIPTION: Enable/disable sensor HDR.
13274 *
13275 * PARAMETERS :
13276 * @hal_metadata: hal metadata structure
13277 *   @enable: boolean whether to enable/disable sensor HDR
13278 *   @isVideoHdrEnable: true when invoked for video HDR (m_bSensorHDREnabled is not updated)
13279 * RETURN     : NO_ERROR on success, BAD_VALUE on failure
13280 *==========================================================================*/
13281int32_t QCamera3HardwareInterface::setSensorHDR(
13282 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13283{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013284 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013285 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13286
13287 if (enable) {
13288 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13289 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13290 #ifdef _LE_CAMERA_
13291 //Default to staggered HDR for IOT
13292 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13293 #else
13294 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13295 #endif
13296 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13297 }
13298
13299 bool isSupported = false;
13300 switch (sensor_hdr) {
13301 case CAM_SENSOR_HDR_IN_SENSOR:
13302 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13303 CAM_QCOM_FEATURE_SENSOR_HDR) {
13304 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013305 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013306 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013307 break;
13308 case CAM_SENSOR_HDR_ZIGZAG:
13309 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13310 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13311 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013312 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013313 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013314 break;
13315 case CAM_SENSOR_HDR_STAGGERED:
13316 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13317 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13318 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013319 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013320 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013321 break;
13322 case CAM_SENSOR_HDR_OFF:
13323 isSupported = true;
13324 LOGD("Turning off sensor HDR");
13325 break;
13326 default:
13327 LOGE("HDR mode %d not supported", sensor_hdr);
13328 rc = BAD_VALUE;
13329 break;
13330 }
13331
13332 if(isSupported) {
13333 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13334 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13335 rc = BAD_VALUE;
13336 } else {
13337 if(!isVideoHdrEnable)
13338 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013339 }
13340 }
13341 return rc;
13342}
13343
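/*===========================================================================
 * EXAMPLE (illustrative only): setSensorHDR() reads persist.camera.sensor.hdr
 * when HDR is being enabled and interprets it as a cam_sensor_hdr_type_t,
 * which by enum order corresponds to 0 = off, 1 = in-sensor, 2 = zigzag,
 * 3 = staggered; the mode is applied only if the matching bit is set in
 * qcom_supported_feature_mask. A typical way to force staggered HDR on a
 * device that advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR:
 *
 *   // adb shell setprop persist.camera.sensor.hdr 3
 *   // then request video HDR through the QCAMERA3_VIDEO_HDR_MODE vendor tag
 *   // (CAM_VIDEO_HDR_MODE_ON); the HAL ends up calling
 *   // setSensorHDR(hal_metadata, true, true).
 *==========================================================================*/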
13344/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013345 * FUNCTION : needRotationReprocess
13346 *
13347 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13348 *
13349 * PARAMETERS : none
13350 *
13351 * RETURN : true: needed
13352 * false: no need
13353 *==========================================================================*/
13354bool QCamera3HardwareInterface::needRotationReprocess()
13355{
13356 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13357 // current rotation is not zero, and pp has the capability to process rotation
13358 LOGH("need do reprocess for rotation");
13359 return true;
13360 }
13361
13362 return false;
13363}
13364
13365/*===========================================================================
13366 * FUNCTION : needReprocess
13367 *
13368 * DESCRIPTION: if reprocess is needed
13369 *
13370 * PARAMETERS : none
13371 *
13372 * RETURN : true: needed
13373 * false: no need
13374 *==========================================================================*/
13375bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13376{
13377 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13378 // TODO: add for ZSL HDR later
13379 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13380 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13381 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13382 return true;
13383 } else {
13384 LOGH("already post processed frame");
13385 return false;
13386 }
13387 }
13388 return needRotationReprocess();
13389}
13390
13391/*===========================================================================
13392 * FUNCTION : needJpegExifRotation
13393 *
13394 * DESCRIPTION: if EXIF rotation by the JPEG encoder is needed
13395 *
13396 * PARAMETERS : none
13397 *
13398 * RETURN : true: needed
13399 * false: no need
13400 *==========================================================================*/
13401bool QCamera3HardwareInterface::needJpegExifRotation()
13402{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013403 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013404 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13405        LOGD("Need to use Jpeg EXIF Rotation");
13406 return true;
13407 }
13408 return false;
13409}
13410
13411/*===========================================================================
13412 * FUNCTION : addOfflineReprocChannel
13413 *
13414 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13415 * coming from input channel
13416 *
13417 * PARAMETERS :
13418 * @config : reprocess configuration
13419 * @inputChHandle : pointer to the input (source) channel
13420 *
13421 *
13422 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13423 *==========================================================================*/
13424QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13425 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13426{
13427 int32_t rc = NO_ERROR;
13428 QCamera3ReprocessChannel *pChannel = NULL;
13429
13430 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013431 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13432 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013433 if (NULL == pChannel) {
13434 LOGE("no mem for reprocess channel");
13435 return NULL;
13436 }
13437
13438 rc = pChannel->initialize(IS_TYPE_NONE);
13439 if (rc != NO_ERROR) {
13440 LOGE("init reprocess channel failed, ret = %d", rc);
13441 delete pChannel;
13442 return NULL;
13443 }
13444
13445 // pp feature config
13446 cam_pp_feature_config_t pp_config;
13447 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13448
13449 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13450 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13451 & CAM_QCOM_FEATURE_DSDN) {
13452            //Use CPP CDS in case h/w supports it.
13453 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13454 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13455 }
13456 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13457 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13458 }
13459
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013460 if (config.hdr_param.hdr_enable) {
13461 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13462 pp_config.hdr_param = config.hdr_param;
13463 }
13464
13465 if (mForceHdrSnapshot) {
13466 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13467 pp_config.hdr_param.hdr_enable = 1;
13468 pp_config.hdr_param.hdr_need_1x = 0;
13469 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13470 }
13471
Thierry Strudel3d639192016-09-09 11:52:26 -070013472 rc = pChannel->addReprocStreamsFromSource(pp_config,
13473 config,
13474 IS_TYPE_NONE,
13475 mMetadataChannel);
13476
13477 if (rc != NO_ERROR) {
13478 delete pChannel;
13479 return NULL;
13480 }
13481 return pChannel;
13482}
13483
13484/*===========================================================================
13485 * FUNCTION : getMobicatMask
13486 *
13487 * DESCRIPTION: returns mobicat mask
13488 *
13489 * PARAMETERS : none
13490 *
13491 * RETURN : mobicat mask
13492 *
13493 *==========================================================================*/
13494uint8_t QCamera3HardwareInterface::getMobicatMask()
13495{
13496 return m_MobicatMask;
13497}
13498
13499/*===========================================================================
13500 * FUNCTION : setMobicat
13501 *
13502 * DESCRIPTION: set Mobicat on/off.
13503 *
13504 * PARAMETERS :
13505 * @params : none
13506 *
13507 * RETURN : int32_t type of status
13508 * NO_ERROR -- success
13509 *              non-zero failure code
13510 *==========================================================================*/
13511int32_t QCamera3HardwareInterface::setMobicat()
13512{
13513 char value [PROPERTY_VALUE_MAX];
13514 property_get("persist.camera.mobicat", value, "0");
13515 int32_t ret = NO_ERROR;
13516 uint8_t enableMobi = (uint8_t)atoi(value);
13517
13518 if (enableMobi) {
13519 tune_cmd_t tune_cmd;
13520 tune_cmd.type = SET_RELOAD_CHROMATIX;
13521 tune_cmd.module = MODULE_ALL;
13522 tune_cmd.value = TRUE;
13523 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13524 CAM_INTF_PARM_SET_VFE_COMMAND,
13525 tune_cmd);
13526
13527 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13528 CAM_INTF_PARM_SET_PP_COMMAND,
13529 tune_cmd);
13530 }
13531 m_MobicatMask = enableMobi;
13532
13533 return ret;
13534}
13535
13536/*===========================================================================
13537* FUNCTION : getLogLevel
13538*
13539* DESCRIPTION: Reads the log level property into a variable
13540*
13541* PARAMETERS :
13542* None
13543*
13544* RETURN :
13545* None
13546*==========================================================================*/
13547void QCamera3HardwareInterface::getLogLevel()
13548{
13549 char prop[PROPERTY_VALUE_MAX];
13550 uint32_t globalLogLevel = 0;
13551
13552 property_get("persist.camera.hal.debug", prop, "0");
13553 int val = atoi(prop);
13554 if (0 <= val) {
13555 gCamHal3LogLevel = (uint32_t)val;
13556 }
13557
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013558 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013559 gKpiDebugLevel = atoi(prop);
13560
13561 property_get("persist.camera.global.debug", prop, "0");
13562 val = atoi(prop);
13563 if (0 <= val) {
13564 globalLogLevel = (uint32_t)val;
13565 }
13566
13567 /* Highest log level among hal.logs and global.logs is selected */
13568 if (gCamHal3LogLevel < globalLogLevel)
13569 gCamHal3LogLevel = globalLogLevel;
13570
13571 return;
13572}
13573
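/*===========================================================================
 * EXAMPLE (illustrative only): the effective HAL log level is the maximum of
 * persist.camera.hal.debug and persist.camera.global.debug, so raising either
 * property alone is enough to increase verbosity:
 *
 *   // adb shell setprop persist.camera.hal.debug 3
 *   // adb shell setprop persist.camera.global.debug 1
 *   // -> gCamHal3LogLevel == 3 after the next getLogLevel() call
 *==========================================================================*/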
13574/*===========================================================================
13575 * FUNCTION : validateStreamRotations
13576 *
13577 * DESCRIPTION: Check if the rotations requested are supported
13578 *
13579 * PARAMETERS :
13580 * @stream_list : streams to be configured
13581 *
13582 * RETURN : NO_ERROR on success
13583 * -EINVAL on failure
13584 *
13585 *==========================================================================*/
13586int QCamera3HardwareInterface::validateStreamRotations(
13587 camera3_stream_configuration_t *streamList)
13588{
13589 int rc = NO_ERROR;
13590
13591 /*
13592 * Loop through all streams requested in configuration
13593 * Check if unsupported rotations have been requested on any of them
13594 */
13595 for (size_t j = 0; j < streamList->num_streams; j++){
13596 camera3_stream_t *newStream = streamList->streams[j];
13597
13598 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13599 bool isImplDef = (newStream->format ==
13600 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13601 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13602 isImplDef);
13603
13604 if (isRotated && (!isImplDef || isZsl)) {
13605 LOGE("Error: Unsupported rotation of %d requested for stream"
13606                    " type:%d and stream format:%d",
13607 newStream->rotation, newStream->stream_type,
13608 newStream->format);
13609 rc = -EINVAL;
13610 break;
13611 }
13612 }
13613
13614 return rc;
13615}
13616
13617/*===========================================================================
13618* FUNCTION : getFlashInfo
13619*
13620* DESCRIPTION: Retrieve information about whether the device has a flash.
13621*
13622* PARAMETERS :
13623* @cameraId : Camera id to query
13624* @hasFlash : Boolean indicating whether there is a flash device
13625* associated with given camera
13626* @flashNode : If a flash device exists, this will be its device node.
13627*
13628* RETURN :
13629* None
13630*==========================================================================*/
13631void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13632 bool& hasFlash,
13633 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13634{
13635 cam_capability_t* camCapability = gCamCapability[cameraId];
13636 if (NULL == camCapability) {
13637 hasFlash = false;
13638 flashNode[0] = '\0';
13639 } else {
13640 hasFlash = camCapability->flash_available;
13641 strlcpy(flashNode,
13642 (char*)camCapability->flash_dev_name,
13643 QCAMERA_MAX_FILEPATH_LENGTH);
13644 }
13645}
13646
13647/*===========================================================================
13648* FUNCTION : getEepromVersionInfo
13649*
13650* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13651*
13652* PARAMETERS : None
13653*
13654* RETURN : string describing EEPROM version
13655* "\0" if no such info available
13656*==========================================================================*/
13657const char *QCamera3HardwareInterface::getEepromVersionInfo()
13658{
13659 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13660}
13661
13662/*===========================================================================
13663* FUNCTION : getLdafCalib
13664*
13665* DESCRIPTION: Retrieve Laser AF calibration data
13666*
13667* PARAMETERS : None
13668*
13669* RETURN : Two uint32_t describing laser AF calibration data
13670* NULL if none is available.
13671*==========================================================================*/
13672const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13673{
13674 if (mLdafCalibExist) {
13675 return &mLdafCalib[0];
13676 } else {
13677 return NULL;
13678 }
13679}
13680
13681/*===========================================================================
13682 * FUNCTION : dynamicUpdateMetaStreamInfo
13683 *
13684 * DESCRIPTION: This function:
13685 * (1) stops all the channels
13686 * (2) returns error on pending requests and buffers
13687 * (3) sends metastream_info in setparams
13688 * (4) starts all channels
13689 * This is useful when sensor has to be restarted to apply any
13690 * settings such as frame rate from a different sensor mode
13691 *
13692 * PARAMETERS : None
13693 *
13694 * RETURN : NO_ERROR on success
13695 * Error codes on failure
13696 *
13697 *==========================================================================*/
13698int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13699{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013700 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013701 int rc = NO_ERROR;
13702
13703 LOGD("E");
13704
13705 rc = stopAllChannels();
13706 if (rc < 0) {
13707 LOGE("stopAllChannels failed");
13708 return rc;
13709 }
13710
13711 rc = notifyErrorForPendingRequests();
13712 if (rc < 0) {
13713 LOGE("notifyErrorForPendingRequests failed");
13714 return rc;
13715 }
13716
13717 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13718 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13719                ", Format:%d",
13720 mStreamConfigInfo.type[i],
13721 mStreamConfigInfo.stream_sizes[i].width,
13722 mStreamConfigInfo.stream_sizes[i].height,
13723 mStreamConfigInfo.postprocess_mask[i],
13724 mStreamConfigInfo.format[i]);
13725 }
13726
13727 /* Send meta stream info once again so that ISP can start */
13728 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13729 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13730 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13731 mParameters);
13732 if (rc < 0) {
13733 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13734 }
13735
13736 rc = startAllChannels();
13737 if (rc < 0) {
13738 LOGE("startAllChannels failed");
13739 return rc;
13740 }
13741
13742 LOGD("X");
13743 return rc;
13744}
13745
13746/*===========================================================================
13747 * FUNCTION : stopAllChannels
13748 *
13749 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13750 *
13751 * PARAMETERS : None
13752 *
13753 * RETURN : NO_ERROR on success
13754 * Error codes on failure
13755 *
13756 *==========================================================================*/
13757int32_t QCamera3HardwareInterface::stopAllChannels()
13758{
13759 int32_t rc = NO_ERROR;
13760
13761 LOGD("Stopping all channels");
13762 // Stop the Streams/Channels
13763 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13764 it != mStreamInfo.end(); it++) {
13765 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13766 if (channel) {
13767 channel->stop();
13768 }
13769 (*it)->status = INVALID;
13770 }
13771
13772 if (mSupportChannel) {
13773 mSupportChannel->stop();
13774 }
13775 if (mAnalysisChannel) {
13776 mAnalysisChannel->stop();
13777 }
13778 if (mRawDumpChannel) {
13779 mRawDumpChannel->stop();
13780 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013781 if (mHdrPlusRawSrcChannel) {
13782 mHdrPlusRawSrcChannel->stop();
13783 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013784 if (mMetadataChannel) {
13785 /* If content of mStreamInfo is not 0, there is metadata stream */
13786 mMetadataChannel->stop();
13787 }
13788
13789 LOGD("All channels stopped");
13790 return rc;
13791}
13792
13793/*===========================================================================
13794 * FUNCTION : startAllChannels
13795 *
13796 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13797 *
13798 * PARAMETERS : None
13799 *
13800 * RETURN : NO_ERROR on success
13801 * Error codes on failure
13802 *
13803 *==========================================================================*/
13804int32_t QCamera3HardwareInterface::startAllChannels()
13805{
13806 int32_t rc = NO_ERROR;
13807
13808 LOGD("Start all channels ");
13809 // Start the Streams/Channels
13810 if (mMetadataChannel) {
13811 /* If content of mStreamInfo is not 0, there is metadata stream */
13812 rc = mMetadataChannel->start();
13813 if (rc < 0) {
13814 LOGE("META channel start failed");
13815 return rc;
13816 }
13817 }
13818 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13819 it != mStreamInfo.end(); it++) {
13820 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13821 if (channel) {
13822 rc = channel->start();
13823 if (rc < 0) {
13824 LOGE("channel start failed");
13825 return rc;
13826 }
13827 }
13828 }
13829 if (mAnalysisChannel) {
13830 mAnalysisChannel->start();
13831 }
13832 if (mSupportChannel) {
13833 rc = mSupportChannel->start();
13834 if (rc < 0) {
13835 LOGE("Support channel start failed");
13836 return rc;
13837 }
13838 }
13839 if (mRawDumpChannel) {
13840 rc = mRawDumpChannel->start();
13841 if (rc < 0) {
13842 LOGE("RAW dump channel start failed");
13843 return rc;
13844 }
13845 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013846 if (mHdrPlusRawSrcChannel) {
13847 rc = mHdrPlusRawSrcChannel->start();
13848 if (rc < 0) {
13849 LOGE("HDR+ RAW channel start failed");
13850 return rc;
13851 }
13852 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013853
13854 LOGD("All channels started");
13855 return rc;
13856}
13857
13858/*===========================================================================
13859 * FUNCTION : notifyErrorForPendingRequests
13860 *
13861 * DESCRIPTION: This function sends error for all the pending requests/buffers
13862 *
13863 * PARAMETERS : None
13864 *
13865 * RETURN : Error codes
13866 * NO_ERROR on success
13867 *
13868 *==========================================================================*/
13869int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13870{
13871 int32_t rc = NO_ERROR;
13872 unsigned int frameNum = 0;
13873 camera3_capture_result_t result;
13874 camera3_stream_buffer_t *pStream_Buf = NULL;
13875
13876 memset(&result, 0, sizeof(camera3_capture_result_t));
13877
13878 if (mPendingRequestsList.size() > 0) {
13879 pendingRequestIterator i = mPendingRequestsList.begin();
13880 frameNum = i->frame_number;
13881 } else {
13882 /* There might still be pending buffers even though there are
13883 no pending requests. Setting the frameNum to MAX so that
13884 all the buffers with smaller frame numbers are returned */
13885 frameNum = UINT_MAX;
13886 }
13887
13888 LOGH("Oldest frame num on mPendingRequestsList = %u",
13889 frameNum);
13890
Emilian Peev7650c122017-01-19 08:24:33 -080013891 notifyErrorFoPendingDepthData(mDepthChannel);
13892
Thierry Strudel3d639192016-09-09 11:52:26 -070013893 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13894 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13895
13896 if (req->frame_number < frameNum) {
13897 // Send Error notify to frameworks for each buffer for which
13898 // metadata buffer is already sent
13899 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13900 req->frame_number, req->mPendingBufferList.size());
13901
13902 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13903 if (NULL == pStream_Buf) {
13904 LOGE("No memory for pending buffers array");
13905 return NO_MEMORY;
13906 }
13907 memset(pStream_Buf, 0,
13908 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13909 result.result = NULL;
13910 result.frame_number = req->frame_number;
13911 result.num_output_buffers = req->mPendingBufferList.size();
13912 result.output_buffers = pStream_Buf;
13913
13914 size_t index = 0;
13915 for (auto info = req->mPendingBufferList.begin();
13916 info != req->mPendingBufferList.end(); ) {
13917
13918 camera3_notify_msg_t notify_msg;
13919 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13920 notify_msg.type = CAMERA3_MSG_ERROR;
13921 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13922 notify_msg.message.error.error_stream = info->stream;
13923 notify_msg.message.error.frame_number = req->frame_number;
13924 pStream_Buf[index].acquire_fence = -1;
13925 pStream_Buf[index].release_fence = -1;
13926 pStream_Buf[index].buffer = info->buffer;
13927 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13928 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013929 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013930 index++;
13931 // Remove buffer from list
13932 info = req->mPendingBufferList.erase(info);
13933 }
13934
13935 // Remove this request from Map
13936 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13937 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13938 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13939
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013940 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013941
13942 delete [] pStream_Buf;
13943 } else {
13944
13945 // Go through the pending requests info and send error request to framework
13946 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13947
13948 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13949
13950 // Send error notify to frameworks
13951 camera3_notify_msg_t notify_msg;
13952 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13953 notify_msg.type = CAMERA3_MSG_ERROR;
13954 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13955 notify_msg.message.error.error_stream = NULL;
13956 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013957 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013958
13959 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13960 if (NULL == pStream_Buf) {
13961 LOGE("No memory for pending buffers array");
13962 return NO_MEMORY;
13963 }
13964 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13965
13966 result.result = NULL;
13967 result.frame_number = req->frame_number;
13968 result.input_buffer = i->input_buffer;
13969 result.num_output_buffers = req->mPendingBufferList.size();
13970 result.output_buffers = pStream_Buf;
13971
13972 size_t index = 0;
13973 for (auto info = req->mPendingBufferList.begin();
13974 info != req->mPendingBufferList.end(); ) {
13975 pStream_Buf[index].acquire_fence = -1;
13976 pStream_Buf[index].release_fence = -1;
13977 pStream_Buf[index].buffer = info->buffer;
13978 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13979 pStream_Buf[index].stream = info->stream;
13980 index++;
13981 // Remove buffer from list
13982 info = req->mPendingBufferList.erase(info);
13983 }
13984
13985 // Remove this request from Map
13986 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13987 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13988 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13989
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013990 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013991 delete [] pStream_Buf;
13992 i = erasePendingRequest(i);
13993 }
13994 }
13995
13996 /* Reset pending frame Drop list and requests list */
13997 mPendingFrameDropList.clear();
13998
13999 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14000 req.mPendingBufferList.clear();
14001 }
14002 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014003 LOGH("Cleared all the pending buffers ");
14004
14005 return rc;
14006}
14007
14008bool QCamera3HardwareInterface::isOnEncoder(
14009 const cam_dimension_t max_viewfinder_size,
14010 uint32_t width, uint32_t height)
14011{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014012 return ((width > (uint32_t)max_viewfinder_size.width) ||
14013 (height > (uint32_t)max_viewfinder_size.height) ||
14014 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14015 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014016}
14017
14018/*===========================================================================
14019 * FUNCTION : setBundleInfo
14020 *
14021 * DESCRIPTION: Set bundle info for all streams that are bundle.
14022 *
14023 * PARAMETERS : None
14024 *
14025 * RETURN : NO_ERROR on success
14026 * Error codes on failure
14027 *==========================================================================*/
14028int32_t QCamera3HardwareInterface::setBundleInfo()
14029{
14030 int32_t rc = NO_ERROR;
14031
14032 if (mChannelHandle) {
14033 cam_bundle_config_t bundleInfo;
14034 memset(&bundleInfo, 0, sizeof(bundleInfo));
14035 rc = mCameraHandle->ops->get_bundle_info(
14036 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14037 if (rc != NO_ERROR) {
14038 LOGE("get_bundle_info failed");
14039 return rc;
14040 }
14041 if (mAnalysisChannel) {
14042 mAnalysisChannel->setBundleInfo(bundleInfo);
14043 }
14044 if (mSupportChannel) {
14045 mSupportChannel->setBundleInfo(bundleInfo);
14046 }
14047 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14048 it != mStreamInfo.end(); it++) {
14049 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14050 channel->setBundleInfo(bundleInfo);
14051 }
14052 if (mRawDumpChannel) {
14053 mRawDumpChannel->setBundleInfo(bundleInfo);
14054 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014055 if (mHdrPlusRawSrcChannel) {
14056 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14057 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014058 }
14059
14060 return rc;
14061}
14062
14063/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014064 * FUNCTION : setInstantAEC
14065 *
14066 * DESCRIPTION: Set Instant AEC related params.
14067 *
14068 * PARAMETERS :
14069 * @meta: CameraMetadata reference
14070 *
14071 * RETURN : NO_ERROR on success
14072 * Error codes on failure
14073 *==========================================================================*/
14074int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14075{
14076 int32_t rc = NO_ERROR;
14077 uint8_t val = 0;
14078 char prop[PROPERTY_VALUE_MAX];
14079
14080 // First try to configure instant AEC from framework metadata
14081 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14082 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14083 }
14084
14085 // If framework did not set this value, try to read from set prop.
14086 if (val == 0) {
14087 memset(prop, 0, sizeof(prop));
14088 property_get("persist.camera.instant.aec", prop, "0");
14089 val = (uint8_t)atoi(prop);
14090 }
14091
14092 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14093 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14094 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14095 mInstantAEC = val;
14096 mInstantAECSettledFrameNumber = 0;
14097 mInstantAecFrameIdxCount = 0;
14098 LOGH("instantAEC value set %d",val);
14099 if (mInstantAEC) {
14100 memset(prop, 0, sizeof(prop));
14101 property_get("persist.camera.ae.instant.bound", prop, "10");
14102 int32_t aec_frame_skip_cnt = atoi(prop);
14103 if (aec_frame_skip_cnt >= 0) {
14104 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14105 } else {
14106 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14107 rc = BAD_VALUE;
14108 }
14109 }
14110 } else {
14111 LOGE("Bad instant aec value set %d", val);
14112 rc = BAD_VALUE;
14113 }
14114 return rc;
14115}
14116
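/*===========================================================================
 * EXAMPLE (illustrative only): instant AEC can be requested either through the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag or, when the framework leaves it at 0,
 * through system properties. A sketch of the property route, assuming the
 * value lies in the CAM_AEC_NORMAL_CONVERGENCE..CAM_AEC_CONVERGENCE_MAX range:
 *
 *   // adb shell setprop persist.camera.instant.aec 1
 *   // adb shell setprop persist.camera.ae.instant.bound 10
 *   // -> mInstantAEC is set to 1 and mAecSkipDisplayFrameBound bounds how
 *   //    many display frames may be skipped while exposure converges.
 *==========================================================================*/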
14117/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014118 * FUNCTION : get_num_overall_buffers
14119 *
14120 * DESCRIPTION: Estimate number of pending buffers across all requests.
14121 *
14122 * PARAMETERS : None
14123 *
14124 * RETURN : Number of overall pending buffers
14125 *
14126 *==========================================================================*/
14127uint32_t PendingBuffersMap::get_num_overall_buffers()
14128{
14129 uint32_t sum_buffers = 0;
14130 for (auto &req : mPendingBuffersInRequest) {
14131 sum_buffers += req.mPendingBufferList.size();
14132 }
14133 return sum_buffers;
14134}
14135
14136/*===========================================================================
14137 * FUNCTION : removeBuf
14138 *
14139 * DESCRIPTION: Remove a matching buffer from tracker.
14140 *
14141 * PARAMETERS : @buffer: image buffer for the callback
14142 *
14143 * RETURN : None
14144 *
14145 *==========================================================================*/
14146void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14147{
14148 bool buffer_found = false;
14149 for (auto req = mPendingBuffersInRequest.begin();
14150 req != mPendingBuffersInRequest.end(); req++) {
14151 for (auto k = req->mPendingBufferList.begin();
14152 k != req->mPendingBufferList.end(); k++ ) {
14153 if (k->buffer == buffer) {
14154 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14155 req->frame_number, buffer);
14156 k = req->mPendingBufferList.erase(k);
14157 if (req->mPendingBufferList.empty()) {
14158 // Remove this request from Map
14159 req = mPendingBuffersInRequest.erase(req);
14160 }
14161 buffer_found = true;
14162 break;
14163 }
14164 }
14165 if (buffer_found) {
14166 break;
14167 }
14168 }
14169 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14170 get_num_overall_buffers());
14171}
14172
14173/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014174 * FUNCTION : getBufErrStatus
14175 *
14176 * DESCRIPTION: get buffer error status
14177 *
14178 * PARAMETERS : @buffer: buffer handle
14179 *
14180 * RETURN : Error status
14181 *
14182 *==========================================================================*/
14183int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14184{
14185 for (auto& req : mPendingBuffersInRequest) {
14186 for (auto& k : req.mPendingBufferList) {
14187 if (k.buffer == buffer)
14188 return k.bufStatus;
14189 }
14190 }
14191 return CAMERA3_BUFFER_STATUS_OK;
14192}
14193
14194/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014195 * FUNCTION : setPAAFSupport
14196 *
14197 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14198 * feature mask according to stream type and filter
14199 * arrangement
14200 *
14201 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14202 * @stream_type: stream type
14203 * @filter_arrangement: filter arrangement
14204 *
14205 * RETURN : None
14206 *==========================================================================*/
14207void QCamera3HardwareInterface::setPAAFSupport(
14208 cam_feature_mask_t& feature_mask,
14209 cam_stream_type_t stream_type,
14210 cam_color_filter_arrangement_t filter_arrangement)
14211{
Thierry Strudel3d639192016-09-09 11:52:26 -070014212 switch (filter_arrangement) {
14213 case CAM_FILTER_ARRANGEMENT_RGGB:
14214 case CAM_FILTER_ARRANGEMENT_GRBG:
14215 case CAM_FILTER_ARRANGEMENT_GBRG:
14216 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014217 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14218 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014219 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014220 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14221 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014222 }
14223 break;
14224 case CAM_FILTER_ARRANGEMENT_Y:
14225 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14226 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14227 }
14228 break;
14229 default:
14230 break;
14231 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014232 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14233 feature_mask, stream_type, filter_arrangement);
14234
14235
Thierry Strudel3d639192016-09-09 11:52:26 -070014236}
14237
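/*===========================================================================
 * EXAMPLE (illustrative sketch, assuming a QCamera3HardwareInterface instance
 * named `hw`): for a regular Bayer sensor (e.g. RGGB filter arrangement),
 * preview/analysis/video streams get the PAAF bit added unless the QTI
 * EIS-core feature already claims the stream:
 *
 *   cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *   hw->setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now also contains CAM_QCOM_FEATURE_PAAF; a snapshot stream or a
 *   // mask holding CAM_QTI_FEATURE_PPEISCORE would be left unchanged.
 *==========================================================================*/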
14238/*===========================================================================
14239* FUNCTION : getSensorMountAngle
14240*
14241* DESCRIPTION: Retrieve sensor mount angle
14242*
14243* PARAMETERS : None
14244*
14245* RETURN : sensor mount angle in uint32_t
14246*==========================================================================*/
14247uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14248{
14249 return gCamCapability[mCameraId]->sensor_mount_angle;
14250}
14251
14252/*===========================================================================
14253* FUNCTION : getRelatedCalibrationData
14254*
14255* DESCRIPTION: Retrieve related system calibration data
14256*
14257* PARAMETERS : None
14258*
14259* RETURN : Pointer of related system calibration data
14260*==========================================================================*/
14261const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14262{
14263 return (const cam_related_system_calibration_data_t *)
14264 &(gCamCapability[mCameraId]->related_cam_calibration);
14265}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014266
14267/*===========================================================================
14268 * FUNCTION : is60HzZone
14269 *
14270 * DESCRIPTION: Whether the device is in a region whose mains electricity frequency is 60Hz
14271 *
14272 * PARAMETERS : None
14273 *
14274 * RETURN : True if in 60Hz zone, False otherwise
14275 *==========================================================================*/
14276bool QCamera3HardwareInterface::is60HzZone()
14277{
14278 time_t t = time(NULL);
14279 struct tm lt;
14280
14281 struct tm* r = localtime_r(&t, &lt);
14282
14283 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14284 return true;
14285 else
14286 return false;
14287}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014288
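/*===========================================================================
 * EXAMPLE (illustrative only): is60HzZone() is a coarse UTC-offset heuristic,
 * not a region lookup. Offsets at or below UTC-2 (the Americas) and at or
 * above UTC+8 (East Asia, Australia) are reported as 60Hz; everything in
 * between (Europe, Africa, most of Asia) is treated as 50Hz. For instance:
 *
 *   // TZ=America/Los_Angeles (UTC-8) -> is60HzZone() == true
 *   // TZ=Europe/Berlin       (UTC+1) -> is60HzZone() == false
 *==========================================================================*/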
14289/*===========================================================================
14290 * FUNCTION : adjustBlackLevelForCFA
14291 *
14292 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14293 * of bayer CFA (Color Filter Array).
14294 *
14295 * PARAMETERS : @input: black level pattern in the order of RGGB
14296 * @output: black level pattern in the order of CFA
14297 * @color_arrangement: CFA color arrangement
14298 *
14299 * RETURN : None
14300 *==========================================================================*/
14301template<typename T>
14302void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14303 T input[BLACK_LEVEL_PATTERN_CNT],
14304 T output[BLACK_LEVEL_PATTERN_CNT],
14305 cam_color_filter_arrangement_t color_arrangement)
14306{
14307 switch (color_arrangement) {
14308 case CAM_FILTER_ARRANGEMENT_GRBG:
14309 output[0] = input[1];
14310 output[1] = input[0];
14311 output[2] = input[3];
14312 output[3] = input[2];
14313 break;
14314 case CAM_FILTER_ARRANGEMENT_GBRG:
14315 output[0] = input[2];
14316 output[1] = input[3];
14317 output[2] = input[0];
14318 output[3] = input[1];
14319 break;
14320 case CAM_FILTER_ARRANGEMENT_BGGR:
14321 output[0] = input[3];
14322 output[1] = input[2];
14323 output[2] = input[1];
14324 output[3] = input[0];
14325 break;
14326 case CAM_FILTER_ARRANGEMENT_RGGB:
14327 output[0] = input[0];
14328 output[1] = input[1];
14329 output[2] = input[2];
14330 output[3] = input[3];
14331 break;
14332 default:
14333 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14334 break;
14335 }
14336}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014337
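/*===========================================================================
 * EXAMPLE (illustrative sketch, assuming a QCamera3HardwareInterface instance
 * named `hw`): adjustBlackLevelForCFA() reorders an RGGB quadruple into the
 * sensor's CFA order. For a BGGR sensor:
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.0f, 65.0f, 65.0f, 66.0f}; // R,Gr,Gb,B
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   hw->adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_BGGR);
 *   // cfa == {66.0f, 65.0f, 65.0f, 64.0f}, i.e. B,Gb,Gr,R
 *==========================================================================*/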
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014338void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14339 CameraMetadata &resultMetadata,
14340 std::shared_ptr<metadata_buffer_t> settings)
14341{
14342 if (settings == nullptr) {
14343 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14344 return;
14345 }
14346
14347 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14348 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14349 }
14350
14351 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14352 String8 str((const char *)gps_methods);
14353 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14354 }
14355
14356 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14357 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14358 }
14359
14360 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14361 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14362 }
14363
14364 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14365 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14366 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14367 }
14368
14369 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14370 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14371 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14372 }
14373
14374 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14375 int32_t fwk_thumb_size[2];
14376 fwk_thumb_size[0] = thumb_size->width;
14377 fwk_thumb_size[1] = thumb_size->height;
14378 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14379 }
14380
14381 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14382 uint8_t fwk_intent = intent[0];
14383 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14384 }
14385}
14386
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014387bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14388 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14389 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014390{
14391 if (hdrPlusRequest == nullptr) return false;
14392
14393 // Check noise reduction mode is high quality.
14394 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14395 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14396 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014397 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14398 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014399 return false;
14400 }
14401
14402 // Check edge mode is high quality.
14403 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14404 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14405 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14406 return false;
14407 }
14408
14409 if (request.num_output_buffers != 1 ||
14410 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14411 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014412 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14413 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14414                    request.output_buffers[i].stream->width,
14415                    request.output_buffers[i].stream->height,
14416                    request.output_buffers[i].stream->format);
14417 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014418 return false;
14419 }
14420
14421 // Get a YUV buffer from pic channel.
14422 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14423 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14424 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14425 if (res != OK) {
14426 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14427 __FUNCTION__, strerror(-res), res);
14428 return false;
14429 }
14430
14431 pbcamera::StreamBuffer buffer;
14432 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014433 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014434 buffer.data = yuvBuffer->buffer;
14435 buffer.dataSize = yuvBuffer->frame_len;
14436
14437 pbcamera::CaptureRequest pbRequest;
14438 pbRequest.id = request.frame_number;
14439 pbRequest.outputBuffers.push_back(buffer);
14440
14441 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014442 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014443 if (res != OK) {
14444 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14445 strerror(-res), res);
14446 return false;
14447 }
14448
14449 hdrPlusRequest->yuvBuffer = yuvBuffer;
14450 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14451
14452 return true;
14453}
14454
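/*===========================================================================
 * EXAMPLE (illustrative only): a capture request is routed to HDR+ by the
 * method above only when noise reduction and edge modes are HIGH_QUALITY and
 * the request has exactly one BLOB (JPEG) output. A sketch of framework
 * settings that qualify:
 *
 *   CameraMetadata settings;
 *   uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *   // plus a camera3_capture_request_t with num_output_buffers == 1 and
 *   // output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB
 *==========================================================================*/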
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014455status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14456 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14457 return OK;
14458 }
14459
14460 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14461 if (res != OK) {
14462 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14463 strerror(-res), res);
14464 return res;
14465 }
14466 gHdrPlusClientOpening = true;
14467
14468 return OK;
14469}
14470
Chien-Yu Chenee335912017-02-09 17:53:20 -080014471status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14472{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014473 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014474
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014475 // Check if gHdrPlusClient is opened or being opened.
14476 if (gHdrPlusClient == nullptr) {
14477 if (gHdrPlusClientOpening) {
14478 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14479 return OK;
14480 }
14481
14482 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014483 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014484 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14485 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014486 return res;
14487 }
14488
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014489 // When opening HDR+ client completes, HDR+ mode will be enabled.
14490 return OK;
14491
Chien-Yu Chenee335912017-02-09 17:53:20 -080014492 }
14493
14494 // Configure stream for HDR+.
14495 res = configureHdrPlusStreamsLocked();
14496 if (res != OK) {
14497 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014498 return res;
14499 }
14500
14501 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14502 res = gHdrPlusClient->setZslHdrPlusMode(true);
14503 if (res != OK) {
14504 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014505 return res;
14506 }
14507
14508 mHdrPlusModeEnabled = true;
14509 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14510
14511 return OK;
14512}
14513
14514void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14515{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014516 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014517 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014518 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14519 if (res != OK) {
14520 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14521 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014522
14523 // Close HDR+ client so Easel can enter low power mode.
14524 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14525 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014526 }
14527
14528 mHdrPlusModeEnabled = false;
14529 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14530}
14531
14532status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014533{
14534 pbcamera::InputConfiguration inputConfig;
14535 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14536 status_t res = OK;
14537
14538 // Configure HDR+ client streams.
14539 // Get input config.
14540 if (mHdrPlusRawSrcChannel) {
14541 // HDR+ input buffers will be provided by HAL.
14542 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14543 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14544 if (res != OK) {
14545 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14546 __FUNCTION__, strerror(-res), res);
14547 return res;
14548 }
14549
14550 inputConfig.isSensorInput = false;
14551 } else {
14552 // Sensor MIPI will send data to Easel.
14553 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014554 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014555 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14556 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14557 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14558 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14559 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14560 if (mSensorModeInfo.num_raw_bits != 10) {
14561 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14562 mSensorModeInfo.num_raw_bits);
14563 return BAD_VALUE;
14564 }
14565
14566 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014567 }
14568
14569 // Get output configurations.
14570 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014571 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014572
14573 // Easel may need to output YUV output buffers if mPictureChannel was created.
14574 pbcamera::StreamConfiguration yuvOutputConfig;
14575 if (mPictureChannel != nullptr) {
14576 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14577 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14578 if (res != OK) {
14579 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14580 __FUNCTION__, strerror(-res), res);
14581
14582 return res;
14583 }
14584
14585 outputStreamConfigs.push_back(yuvOutputConfig);
14586 }
14587
14588 // TODO: consider other channels for YUV output buffers.
14589
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014590 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014591 if (res != OK) {
14592        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14593 strerror(-res), res);
14594 return res;
14595 }
14596
14597 return OK;
14598}
14599
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014600void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14601 if (client == nullptr) {
14602 ALOGE("%s: Opened client is null.", __FUNCTION__);
14603 return;
14604 }
14605
14606 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14607
14608 Mutex::Autolock l(gHdrPlusClientLock);
14609 gHdrPlusClient = std::move(client);
14610 gHdrPlusClientOpening = false;
14611
14612 // Set static metadata.
14613 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14614 if (res != OK) {
14615 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14616 __FUNCTION__, strerror(-res), res);
14617 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14618 gHdrPlusClient = nullptr;
14619 return;
14620 }
14621
14622 // Enable HDR+ mode.
14623 res = enableHdrPlusModeLocked();
14624 if (res != OK) {
14625 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14626 }
14627}
14628
14629void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14630 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14631 Mutex::Autolock l(gHdrPlusClientLock);
14632 gHdrPlusClientOpening = false;
14633}

void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata) {
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // Don't dereference an end() iterator if no matching pending request exists.
                ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request because
        // the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();
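        // Note: metadata.release() transfers ownership of the underlying camera_metadata_t to
        // updatedResultMetadata, which is later passed to handlePendingResultsWithLock() below,
        // where it is freed.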

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);
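        // For example, on debuggable builds the dump can typically be enabled at runtime with:
        //     adb shell setprop persist.camera.hdrplus.dump_yuv 1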

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
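                // Dump file name format (written under QCAMERA_DUMP_FRM_LOCATION):
                //     s_<requestId>_<streamId>_<width>x<height>.ppm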
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translating framework metadata to HAL metadata failed: %s (%d).",
                    __FUNCTION__, strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
    // TODO: Handle HDR+ capture failures and send the failure to framework.
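    // (One possible approach, not implemented here: report an error for the affected output
    // buffer to the framework, e.g. with a CAMERA3_MSG_ERROR_BUFFER notification, in addition
    // to returning the YUV buffer below.)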
    Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        // Don't dereference an end() iterator if no matching pending request exists.
        ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }

    // Return the buffer to pic channel.
    QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
    picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

    mHdrPlusPendingRequests.erase(pendingRequest);
}

} // end namespace qcamera