/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold for detecting missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
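// METADATA_MAP_SIZE() yields the number of entries in one of the static
// QCameraMap tables below, computed at compile time from the array type.
// Illustrative usage only (this snippet is not part of the original code path;
// the map name is just an example taken from the tables defined further down):
//     size_t count = METADATA_MAP_SIZE(QCamera3HardwareInterface::EFFECT_MODES_MAP); // 9 entries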

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.

const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF,  CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,   CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since not every option has a mapping, some Android enums are not listed.
 * The order of this list also matters: when mapping from HAL to Android the
 * lookup traverses from lower to higher index, so for HAL values that map to
 * multiple Android values the first entry found is selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};
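// Example of the ordering rule described above (illustrative only): CAM_AWB_D50
// appears for the D50, DAYLIGHT and FINE_WEATHER entries, so a HAL-to-Android
// lookup of CAM_AWB_D50 resolves to ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,
// the first matching entry in this table.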

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialize to an invalid default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
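// 0xDEADBEEF acts as an "invalid session" sentinel: openCamera() overwrites the
// entry with the real backend session id and closeCamera() restores the sentinel.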

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
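// logEaselEvent() is a no-op unless gEaselProfilingEnabled is set (one of the
// Easel globals declared above). A typical call and the resulting logcat line
// look like this (the timestamp value is illustrative):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
//     // [EASEL_STARTUP_LATENCY] Resume at 123456 ms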

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
    // TBD - check whether this hardcoding is needed; verify whether mctl fills this in as 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize framework callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream configuration matches the advertised sizes
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size,
                 * but it is not clear from the spec whether the framework will
                 * always follow that; we also have logic to override to full
                 * array size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
1297
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001298/*===========================================================================
1299 * FUNCTION : validateUsageFlags
1300 *
1301 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1302 *
1303 * PARAMETERS :
1304 * @stream_list : streams to be configured
1305 *
1306 * RETURN :
1307 * NO_ERROR if the usage flags are supported
1308 * error code if usage flags are not supported
1309 *
1310 *==========================================================================*/
1311int QCamera3HardwareInterface::validateUsageFlags(
1312 const camera3_stream_configuration_t* streamList)
1313{
1314 for (size_t j = 0; j < streamList->num_streams; j++) {
1315 const camera3_stream_t *newStream = streamList->streams[j];
1316
1317 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1318 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1319 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1320 continue;
1321 }
1322
1323 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1324 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1325 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1326 bool forcePreviewUBWC = true;
1327 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1328 forcePreviewUBWC = false;
1329 }
1330 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1331 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1332 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1333 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1334 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1336
1337 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1338 // So color spaces will always match.
1339
1340 // Check whether underlying formats of shared streams match.
1341 if (isVideo && isPreview && videoFormat != previewFormat) {
1342 LOGE("Combined video and preview usage flag is not supported");
1343 return -EINVAL;
1344 }
1345 if (isPreview && isZSL && previewFormat != zslFormat) {
1346 LOGE("Combined preview and zsl usage flag is not supported");
1347 return -EINVAL;
1348 }
1349 if (isVideo && isZSL && videoFormat != zslFormat) {
1350 LOGE("Combined video and zsl usage flag is not supported");
1351 return -EINVAL;
1352 }
1353 }
1354 return NO_ERROR;
1355}
1356
1357/*===========================================================================
1358 * FUNCTION : validateUsageFlagsForEis
1359 *
1360 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 * NO_ERROR if the usage flags are supported
1367 * error code if usage flags are not supported
1368 *
1369 *==========================================================================*/
1370int QCamera3HardwareInterface::validateUsageFlagsForEis(
1371 const camera3_stream_configuration_t* streamList)
1372{
1373 for (size_t j = 0; j < streamList->num_streams; j++) {
1374 const camera3_stream_t *newStream = streamList->streams[j];
1375
1376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378
1379 // Because EIS is "hard-coded" for certain use cases, and the current
1380 // implementation doesn't support shared preview and video on the same
1381 // stream, return failure if EIS is forced on.
1382 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1383 LOGE("Combined video and preview usage flag is not supported due to EIS");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*==============================================================================
1391 * FUNCTION : isSupportChannelNeeded
1392 *
1393 * DESCRIPTION: Simple heuristic function to determine if a support channel is needed
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 * @stream_config_info : the config info for streams to be configured
1398 *
1399 * RETURN : Boolean true/false decision
1400 *
1401 *==========================================================================*/
1402bool QCamera3HardwareInterface::isSupportChannelNeeded(
1403 camera3_stream_configuration_t *streamList,
1404 cam_stream_size_info_t stream_config_info)
1405{
1406 uint32_t i;
1407 bool pprocRequested = false;
1408 /* Check for conditions where PProc pipeline does not have any streams*/
1409 for (i = 0; i < stream_config_info.num_streams; i++) {
1410 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1411 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1412 pprocRequested = true;
1413 break;
1414 }
1415 }
1416
1417 if (pprocRequested == false )
1418 return true;
1419
1420 /* A dummy stream is needed if only raw or JPEG streams are present */
1421 for (i = 0; i < streamList->num_streams; i++) {
1422 switch(streamList->streams[i]->format) {
1423 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1424 case HAL_PIXEL_FORMAT_RAW10:
1425 case HAL_PIXEL_FORMAT_RAW16:
1426 case HAL_PIXEL_FORMAT_BLOB:
1427 break;
1428 default:
1429 return false;
1430 }
1431 }
1432 return true;
1433}
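
/*
 * Worked example (illustrative): assuming stream_config_info carries at
 * least one non-NONE postprocess mask, a stream list of only
 * { HAL_PIXEL_FORMAT_RAW16, HAL_PIXEL_FORMAT_BLOB } falls through the
 * format switch above and the function returns true, so a dummy support
 * channel is created. Adding a HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED
 * preview stream hits the default case and the function returns false.
 */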
1434
1435/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001436 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001437 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
1440 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442 *
1443 * RETURN : int32_t type of status
1444 * NO_ERROR -- success
1445 * non-zero failure code
1446 *
1447 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001448int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001449{
1450 int32_t rc = NO_ERROR;
1451
1452 cam_dimension_t max_dim = {0, 0};
1453 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1454 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1455 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1456 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1457 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1458 }
1459
1460 clear_metadata_buffer(mParameters);
1461
1462 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1463 max_dim);
1464 if (rc != NO_ERROR) {
1465 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1466 return rc;
1467 }
1468
1469 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001477
1478 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1479 mParameters);
1480 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 return rc;
1483 }
1484
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001486 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1487 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1488 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1489 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1490 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001491
1492 return rc;
1493}
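
/*
 * Usage sketch (illustrative only): how a caller inside the HAL might read
 * the selected sensor mode once mStreamConfigInfo has been populated. Field
 * names follow the log statement above.
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       LOGH("mode: %dx%d active, clk %u, %d raw bits",
 *               modeInfo.active_array_size.width,
 *               modeInfo.active_array_size.height,
 *               modeInfo.op_pixel_clk, modeInfo.num_raw_bits);
 *   }
 */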
1494
1495/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 * FUNCTION : addToPPFeatureMask
1497 *
1498 * DESCRIPTION: add additional features to pp feature mask based on
1499 * stream type and usecase
1500 *
1501 * PARAMETERS :
1502 * @stream_format : stream type for feature mask
1503 * @stream_idx : stream idx within postprocess_mask list to change
1504 *
1505 * RETURN : None
1506 *
1507 *==========================================================================*/
1508void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1509 uint32_t stream_idx)
1510{
1511 char feature_mask_value[PROPERTY_VALUE_MAX];
1512 cam_feature_mask_t feature_mask;
1513 int args_converted;
1514 int property_len;
1515
1516 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001517#ifdef _LE_CAMERA_
1518 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1519 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1520 property_len = property_get("persist.camera.hal3.feature",
1521 feature_mask_value, swtnr_feature_mask_value);
1522#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001523 property_len = property_get("persist.camera.hal3.feature",
1524 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1527 (feature_mask_value[1] == 'x')) {
1528 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1529 } else {
1530 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1531 }
1532 if (1 != args_converted) {
1533 feature_mask = 0;
1534 LOGE("Wrong feature mask %s", feature_mask_value);
1535 return;
1536 }
1537
1538 switch (stream_format) {
1539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1540 /* Add LLVD to pp feature mask only if video hint is enabled */
1541 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1542 mStreamConfigInfo.postprocess_mask[stream_idx]
1543 |= CAM_QTI_FEATURE_SW_TNR;
1544 LOGH("Added SW TNR to pp feature mask");
1545 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QCOM_FEATURE_LLVD;
1548 LOGH("Added LLVD SeeMore to pp feature mask");
1549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001550 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1551 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001554 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1557 CAM_QTI_FEATURE_BINNING_CORRECTION;
1558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 break;
1560 }
1561 default:
1562 break;
1563 }
1564 LOGD("PP feature mask %llx",
1565 mStreamConfigInfo.postprocess_mask[stream_idx]);
1566}
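
/*
 * Property format sketch (illustrative): persist.camera.hal3.feature accepts
 * either a hex string ("0x...") or a decimal string, matching the sscanf
 * calls above. The value shown is a placeholder, not a real feature bit.
 *
 *   adb shell setprop persist.camera.hal3.feature 0x1000   # hypothetical mask
 *   adb shell setprop persist.camera.hal3.feature 4096     # same mask, decimal
 */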
1567
1568/*==============================================================================
1569 * FUNCTION : updateFpsInPreviewBuffer
1570 *
1571 * DESCRIPTION: update FPS information in preview buffer.
1572 *
1573 * PARAMETERS :
1574 * @metadata : pointer to metadata buffer
1575 * @frame_number: frame_number to look for in pending buffer list
1576 *
1577 * RETURN : None
1578 *
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1581 uint32_t frame_number)
1582{
1583 // Mark all pending buffers for this particular request
1584 // with corresponding framerate information
1585 for (List<PendingBuffersInRequest>::iterator req =
1586 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1587 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1588 for(List<PendingBufferInfo>::iterator j =
1589 req->mPendingBufferList.begin();
1590 j != req->mPendingBufferList.end(); j++) {
1591 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1592 if ((req->frame_number == frame_number) &&
1593 (channel->getStreamTypeMask() &
1594 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1595 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1596 CAM_INTF_PARM_FPS_RANGE, metadata) {
1597 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1598 struct private_handle_t *priv_handle =
1599 (struct private_handle_t *)(*(j->buffer));
1600 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1601 }
1602 }
1603 }
1604 }
1605}
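
/*
 * Note (illustrative): the refresh-rate hint above uses the qdMetaData
 * helper. A minimal stand-alone call would look like the sketch below;
 * MetaData_t::refreshrate is assumed to be float-compatible here.
 *
 *   float fps = 30.0f;
 *   struct private_handle_t *hnd = ...; // gralloc handle backing the preview
 *   setMetaData(hnd, UPDATE_REFRESH_RATE, &fps);
 */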
1606
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001607/*==============================================================================
1608 * FUNCTION : updateTimeStampInPendingBuffers
1609 *
1610 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1611 * of a frame number
1612 *
1613 * PARAMETERS :
1614 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1615 * @timestamp : timestamp to be set
1616 *
1617 * RETURN : None
1618 *
1619 *==========================================================================*/
1620void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1621 uint32_t frameNumber, nsecs_t timestamp)
1622{
1623 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1624 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1625 if (req->frame_number != frameNumber)
1626 continue;
1627
1628 for (auto k = req->mPendingBufferList.begin();
1629 k != req->mPendingBufferList.end(); k++ ) {
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *) (*(k->buffer));
1632 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1633 }
1634 }
1635 return;
1636}
1637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638/*===========================================================================
1639 * FUNCTION : configureStreams
1640 *
1641 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1642 * and output streams.
1643 *
1644 * PARAMETERS :
1645 * @stream_list : streams to be configured
1646 *
1647 * RETURN :
1648 *
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreams(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001657 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660
1661 return rc;
1662}
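
/*
 * Framework-side sketch (illustrative, not part of the HAL): a minimal
 * preview + JPEG stream list of the kind this entry point receives. Sizes
 * and operation mode are examples only.
 *
 *   camera3_stream_t preview = {};
 *   preview.stream_type = CAMERA3_STREAM_OUTPUT;
 *   preview.format      = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
 *   preview.width = 1440;  preview.height = 1080;
 *
 *   camera3_stream_t jpeg = {};
 *   jpeg.stream_type = CAMERA3_STREAM_OUTPUT;
 *   jpeg.format      = HAL_PIXEL_FORMAT_BLOB;
 *   jpeg.width = 4032;  jpeg.height = 3024;
 *
 *   camera3_stream_t *streams[] = { &preview, &jpeg };
 *   camera3_stream_configuration_t config = {};
 *   config.num_streams    = 2;
 *   config.streams        = streams;
 *   config.operation_mode = CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE;
 *   // The framework then calls configure_streams(), which lands here.
 */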
1663
1664/*===========================================================================
1665 * FUNCTION : configureStreamsPerfLocked
1666 *
1667 * DESCRIPTION: configureStreams while perfLock is held.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN : int32_t type of status
1673 * NO_ERROR -- success
1674 * non-zero failure code
1675 *==========================================================================*/
1676int QCamera3HardwareInterface::configureStreamsPerfLocked(
1677 camera3_stream_configuration_t *streamList)
1678{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001679 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001680 int rc = 0;
1681
1682 // Sanity check stream_list
1683 if (streamList == NULL) {
1684 LOGE("NULL stream configuration");
1685 return BAD_VALUE;
1686 }
1687 if (streamList->streams == NULL) {
1688 LOGE("NULL stream list");
1689 return BAD_VALUE;
1690 }
1691
1692 if (streamList->num_streams < 1) {
1693 LOGE("Bad number of streams requested: %d",
1694 streamList->num_streams);
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1699 LOGE("Maximum number of streams %d exceeded: %d",
1700 MAX_NUM_STREAMS, streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001704 rc = validateUsageFlags(streamList);
1705 if (rc != NO_ERROR) {
1706 return rc;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 mOpMode = streamList->operation_mode;
1710 LOGD("mOpMode: %d", mOpMode);
1711
1712 /* first invalidate all the streams in mStreamInfo;
1713 * if they appear again, they will be validated */
1714 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1715 it != mStreamInfo.end(); it++) {
1716 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1717 if (channel) {
1718 channel->stop();
1719 }
1720 (*it)->status = INVALID;
1721 }
1722
1723 if (mRawDumpChannel) {
1724 mRawDumpChannel->stop();
1725 delete mRawDumpChannel;
1726 mRawDumpChannel = NULL;
1727 }
1728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001729 if (mHdrPlusRawSrcChannel) {
1730 mHdrPlusRawSrcChannel->stop();
1731 delete mHdrPlusRawSrcChannel;
1732 mHdrPlusRawSrcChannel = NULL;
1733 }
1734
Thierry Strudel3d639192016-09-09 11:52:26 -07001735 if (mSupportChannel)
1736 mSupportChannel->stop();
1737
1738 if (mAnalysisChannel) {
1739 mAnalysisChannel->stop();
1740 }
1741 if (mMetadataChannel) {
1742 /* If mStreamInfo is not empty, there is a metadata stream */
1743 mMetadataChannel->stop();
1744 }
1745 if (mChannelHandle) {
1746 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1747 mChannelHandle);
1748 LOGD("stopping channel %d", mChannelHandle);
1749 }
1750
1751 pthread_mutex_lock(&mMutex);
1752
1753 // Check state
1754 switch (mState) {
1755 case INITIALIZED:
1756 case CONFIGURED:
1757 case STARTED:
1758 /* valid state */
1759 break;
1760 default:
1761 LOGE("Invalid state %d", mState);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENODEV;
1764 }
1765
1766 /* Check whether we have video stream */
1767 m_bIs4KVideo = false;
1768 m_bIsVideo = false;
1769 m_bEisSupportedSize = false;
1770 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001771 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001772 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001773 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001774 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001775 uint32_t videoWidth = 0U;
1776 uint32_t videoHeight = 0U;
1777 size_t rawStreamCnt = 0;
1778 size_t stallStreamCnt = 0;
1779 size_t processedStreamCnt = 0;
1780 // Number of streams on ISP encoder path
1781 size_t numStreamsOnEncoder = 0;
1782 size_t numYuv888OnEncoder = 0;
1783 bool bYuv888OverrideJpeg = false;
1784 cam_dimension_t largeYuv888Size = {0, 0};
1785 cam_dimension_t maxViewfinderSize = {0, 0};
1786 bool bJpegExceeds4K = false;
1787 bool bJpegOnEncoder = false;
1788 bool bUseCommonFeatureMask = false;
1789 cam_feature_mask_t commonFeatureMask = 0;
1790 bool bSmallJpegSize = false;
1791 uint32_t width_ratio;
1792 uint32_t height_ratio;
1793 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1794 camera3_stream_t *inputStream = NULL;
1795 bool isJpeg = false;
1796 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001797 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001798 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001799
1800 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1801
1802 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 uint8_t eis_prop_set;
1804 uint32_t maxEisWidth = 0;
1805 uint32_t maxEisHeight = 0;
1806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001807 // Initialize all instant AEC related variables
1808 mInstantAEC = false;
1809 mResetInstantAEC = false;
1810 mInstantAECSettledFrameNumber = 0;
1811 mAecSkipDisplayFrameBound = 0;
1812 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 mCurrFeatureState = 0;
1814 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001815
Thierry Strudel3d639192016-09-09 11:52:26 -07001816 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1817
1818 size_t count = IS_TYPE_MAX;
1819 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1820 for (size_t i = 0; i < count; i++) {
1821 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1823 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 break;
1825 }
1826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001829 maxEisWidth = MAX_EIS_WIDTH;
1830 maxEisHeight = MAX_EIS_HEIGHT;
1831 }
1832
1833 /* EIS setprop control */
1834 char eis_prop[PROPERTY_VALUE_MAX];
1835 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001836 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 eis_prop_set = (uint8_t)atoi(eis_prop);
1838
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001839 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1841
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001842 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1843 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001844
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 /* stream configurations */
1846 for (size_t i = 0; i < streamList->num_streams; i++) {
1847 camera3_stream_t *newStream = streamList->streams[i];
1848 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1849 "height = %d, rotation = %d, usage = 0x%x",
1850 i, newStream->stream_type, newStream->format,
1851 newStream->width, newStream->height, newStream->rotation,
1852 newStream->usage);
1853 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1854 newStream->stream_type == CAMERA3_STREAM_INPUT){
1855 isZsl = true;
1856 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001857 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1858 IS_USAGE_PREVIEW(newStream->usage)) {
1859 isPreview = true;
1860 }
1861
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1863 inputStream = newStream;
1864 }
1865
Emilian Peev7650c122017-01-19 08:24:33 -08001866 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1867 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 isJpeg = true;
1869 jpegSize.width = newStream->width;
1870 jpegSize.height = newStream->height;
1871 if (newStream->width > VIDEO_4K_WIDTH ||
1872 newStream->height > VIDEO_4K_HEIGHT)
1873 bJpegExceeds4K = true;
1874 }
1875
1876 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1877 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1878 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001879 // In HAL3 we can have multiple different video streams.
1880 // The variables video width and height are used below as
1881 // dimensions of the biggest of them
1882 if (videoWidth < newStream->width ||
1883 videoHeight < newStream->height) {
1884 videoWidth = newStream->width;
1885 videoHeight = newStream->height;
1886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1888 (VIDEO_4K_HEIGHT <= newStream->height)) {
1889 m_bIs4KVideo = true;
1890 }
1891 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1892 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 }
1895 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1896 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1897 switch (newStream->format) {
1898 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001899 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1900 depthPresent = true;
1901 break;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 stallStreamCnt++;
1904 if (isOnEncoder(maxViewfinderSize, newStream->width,
1905 newStream->height)) {
1906 numStreamsOnEncoder++;
1907 bJpegOnEncoder = true;
1908 }
1909 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1910 newStream->width);
1911 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1912 newStream->height);
1913 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1914 "FATAL: max_downscale_factor cannot be zero and so assert");
1915 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1916 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1917 LOGH("Setting small jpeg size flag to true");
1918 bSmallJpegSize = true;
1919 }
1920 break;
1921 case HAL_PIXEL_FORMAT_RAW10:
1922 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1923 case HAL_PIXEL_FORMAT_RAW16:
1924 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001925 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1926 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1927 pdStatCount++;
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1931 processedStreamCnt++;
1932 if (isOnEncoder(maxViewfinderSize, newStream->width,
1933 newStream->height)) {
1934 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1935 !IS_USAGE_ZSL(newStream->usage)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 }
1938 numStreamsOnEncoder++;
1939 }
1940 break;
1941 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1942 processedStreamCnt++;
1943 if (isOnEncoder(maxViewfinderSize, newStream->width,
1944 newStream->height)) {
1945 // If Yuv888 size is not greater than 4K, set feature mask
1946 // to SUPERSET so that it support concurrent request on
1947 // YUV and JPEG.
1948 if (newStream->width <= VIDEO_4K_WIDTH &&
1949 newStream->height <= VIDEO_4K_HEIGHT) {
1950 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1951 }
1952 numStreamsOnEncoder++;
1953 numYuv888OnEncoder++;
1954 largeYuv888Size.width = newStream->width;
1955 largeYuv888Size.height = newStream->height;
1956 }
1957 break;
1958 default:
1959 processedStreamCnt++;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width,
1961 newStream->height)) {
1962 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 }
1967
1968 }
1969 }
1970
1971 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1972 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1973 !m_bIsVideo) {
1974 m_bEisEnable = false;
1975 }
1976
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1978 pthread_mutex_unlock(&mMutex);
1979 return -EINVAL;
1980 }
1981
Thierry Strudel54dc9782017-02-15 12:12:10 -08001982 uint8_t forceEnableTnr = 0;
1983 char tnr_prop[PROPERTY_VALUE_MAX];
1984 memset(tnr_prop, 0, sizeof(tnr_prop));
1985 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1986 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1987
Thierry Strudel3d639192016-09-09 11:52:26 -07001988 /* Logic to enable/disable TNR based on specific config size/etc.*/
1989 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001990 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1991 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001992 else if (forceEnableTnr)
1993 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001994
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001995 char videoHdrProp[PROPERTY_VALUE_MAX];
1996 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1997 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1998 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
1999
2000 if (hdr_mode_prop == 1 && m_bIsVideo &&
2001 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2002 m_bVideoHdrEnabled = true;
2003 else
2004 m_bVideoHdrEnabled = false;
2005
2006
Thierry Strudel3d639192016-09-09 11:52:26 -07002007 /* Check if num_streams is sane */
2008 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2009 rawStreamCnt > MAX_RAW_STREAMS ||
2010 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2011 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2012 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2013 pthread_mutex_unlock(&mMutex);
2014 return -EINVAL;
2015 }
2016 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002017 if (isZsl && m_bIs4KVideo) {
2018 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002019 pthread_mutex_unlock(&mMutex);
2020 return -EINVAL;
2021 }
2022 /* Check if stream sizes are sane */
2023 if (numStreamsOnEncoder > 2) {
2024 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2025 pthread_mutex_unlock(&mMutex);
2026 return -EINVAL;
2027 } else if (1 < numStreamsOnEncoder){
2028 bUseCommonFeatureMask = true;
2029 LOGH("Multiple streams above max viewfinder size, common mask needed");
2030 }
2031
2032 /* Check if BLOB size is greater than 4k in 4k recording case */
2033 if (m_bIs4KVideo && bJpegExceeds4K) {
2034 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2035 pthread_mutex_unlock(&mMutex);
2036 return -EINVAL;
2037 }
2038
Emilian Peev7650c122017-01-19 08:24:33 -08002039 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2040 depthPresent) {
2041 LOGE("HAL doesn't support depth streams in HFR mode!");
2042 pthread_mutex_unlock(&mMutex);
2043 return -EINVAL;
2044 }
2045
Thierry Strudel3d639192016-09-09 11:52:26 -07002046 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2047 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2048 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2049 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2050 // configurations:
2051 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2052 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2053 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2054 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2055 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2056 __func__);
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 }
2060
2061 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2062 // the YUV stream's size is greater or equal to the JPEG size, set common
2063 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2064 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2065 jpegSize.width, jpegSize.height) &&
2066 largeYuv888Size.width > jpegSize.width &&
2067 largeYuv888Size.height > jpegSize.height) {
2068 bYuv888OverrideJpeg = true;
2069 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2070 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2071 }
2072
2073 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2074 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2075 commonFeatureMask);
2076 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2077 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2078
2079 rc = validateStreamDimensions(streamList);
2080 if (rc == NO_ERROR) {
2081 rc = validateStreamRotations(streamList);
2082 }
2083 if (rc != NO_ERROR) {
2084 LOGE("Invalid stream configuration requested!");
2085 pthread_mutex_unlock(&mMutex);
2086 return rc;
2087 }
2088
Emilian Peev0f3c3162017-03-15 12:57:46 +00002089 if (1 < pdStatCount) {
2090 LOGE("HAL doesn't support multiple PD streams");
2091 pthread_mutex_unlock(&mMutex);
2092 return -EINVAL;
2093 }
2094
2095 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2096 (1 == pdStatCount)) {
2097 LOGE("HAL doesn't support PD streams in HFR mode!");
2098 pthread_mutex_unlock(&mMutex);
2099 return -EINVAL;
2100 }
2101
Thierry Strudel3d639192016-09-09 11:52:26 -07002102 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2103 for (size_t i = 0; i < streamList->num_streams; i++) {
2104 camera3_stream_t *newStream = streamList->streams[i];
2105 LOGH("newStream type = %d, stream format = %d "
2106 "stream size : %d x %d, stream rotation = %d",
2107 newStream->stream_type, newStream->format,
2108 newStream->width, newStream->height, newStream->rotation);
2109 //if the stream is already in mStreamInfo, validate it
2110 bool stream_exists = false;
2111 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2112 it != mStreamInfo.end(); it++) {
2113 if ((*it)->stream == newStream) {
2114 QCamera3ProcessingChannel *channel =
2115 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2116 stream_exists = true;
2117 if (channel)
2118 delete channel;
2119 (*it)->status = VALID;
2120 (*it)->stream->priv = NULL;
2121 (*it)->channel = NULL;
2122 }
2123 }
2124 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2125 //new stream
2126 stream_info_t* stream_info;
2127 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2128 if (!stream_info) {
2129 LOGE("Could not allocate stream info");
2130 rc = -ENOMEM;
2131 pthread_mutex_unlock(&mMutex);
2132 return rc;
2133 }
2134 stream_info->stream = newStream;
2135 stream_info->status = VALID;
2136 stream_info->channel = NULL;
2137 mStreamInfo.push_back(stream_info);
2138 }
2139 /* Covers Opaque ZSL and API1 F/W ZSL */
2140 if (IS_USAGE_ZSL(newStream->usage)
2141 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2142 if (zslStream != NULL) {
2143 LOGE("Multiple input/reprocess streams requested!");
2144 pthread_mutex_unlock(&mMutex);
2145 return BAD_VALUE;
2146 }
2147 zslStream = newStream;
2148 }
2149 /* Covers YUV reprocess */
2150 if (inputStream != NULL) {
2151 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2152 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2153 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->width == newStream->width
2155 && inputStream->height == newStream->height) {
2156 if (zslStream != NULL) {
2157 /* This scenario indicates that multiple YUV streams with the same size
2158 * as the input stream have been requested. Since the zsl stream handle
2159 * is used solely for overriding the size of streams which share h/w
2160 * streams, we just make a guess here as to which stream is the ZSL
2161 * stream. This will be refactored once we have generic logic for
2162 * streams sharing encoder output.
2163 */
2164 LOGH("Warning, Multiple ip/reprocess streams requested!");
2165 }
2166 zslStream = newStream;
2167 }
2168 }
2169 }
2170
2171 /* If a zsl stream is set, we know that we have configured at least one input or
2172 bidirectional stream */
2173 if (NULL != zslStream) {
2174 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2175 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2176 mInputStreamInfo.format = zslStream->format;
2177 mInputStreamInfo.usage = zslStream->usage;
2178 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2179 mInputStreamInfo.dim.width,
2180 mInputStreamInfo.dim.height,
2181 mInputStreamInfo.format, mInputStreamInfo.usage);
2182 }
2183
2184 cleanAndSortStreamInfo();
2185 if (mMetadataChannel) {
2186 delete mMetadataChannel;
2187 mMetadataChannel = NULL;
2188 }
2189 if (mSupportChannel) {
2190 delete mSupportChannel;
2191 mSupportChannel = NULL;
2192 }
2193
2194 if (mAnalysisChannel) {
2195 delete mAnalysisChannel;
2196 mAnalysisChannel = NULL;
2197 }
2198
2199 if (mDummyBatchChannel) {
2200 delete mDummyBatchChannel;
2201 mDummyBatchChannel = NULL;
2202 }
2203
Emilian Peev7650c122017-01-19 08:24:33 -08002204 if (mDepthChannel) {
2205 mDepthChannel = NULL;
2206 }
2207
Thierry Strudel2896d122017-02-23 19:18:03 -08002208 char is_type_value[PROPERTY_VALUE_MAX];
2209 property_get("persist.camera.is_type", is_type_value, "4");
2210 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2211
Thierry Strudel3d639192016-09-09 11:52:26 -07002212 //Create metadata channel and initialize it
2213 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2214 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2215 gCamCapability[mCameraId]->color_arrangement);
2216 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2217 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002218 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002219 if (mMetadataChannel == NULL) {
2220 LOGE("failed to allocate metadata channel");
2221 rc = -ENOMEM;
2222 pthread_mutex_unlock(&mMutex);
2223 return rc;
2224 }
2225 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2226 if (rc < 0) {
2227 LOGE("metadata channel initialization failed");
2228 delete mMetadataChannel;
2229 mMetadataChannel = NULL;
2230 pthread_mutex_unlock(&mMutex);
2231 return rc;
2232 }
2233
Thierry Strudel2896d122017-02-23 19:18:03 -08002234 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002235 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002236 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002237 // Keep track of preview/video stream indices.
2238 // There could be more than one preview streams, but only one video stream.
2239 int32_t video_stream_idx = -1;
2240 int32_t preview_stream_idx[streamList->num_streams];
2241 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002242 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2243 /* Allocate channel objects for the requested streams */
2244 for (size_t i = 0; i < streamList->num_streams; i++) {
2245 camera3_stream_t *newStream = streamList->streams[i];
2246 uint32_t stream_usage = newStream->usage;
2247 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2248 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2249 struct camera_info *p_info = NULL;
2250 pthread_mutex_lock(&gCamLock);
2251 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2252 pthread_mutex_unlock(&gCamLock);
2253 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2254 || IS_USAGE_ZSL(newStream->usage)) &&
2255 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002256 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002257 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002258 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2259 if (bUseCommonFeatureMask)
2260 zsl_ppmask = commonFeatureMask;
2261 else
2262 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002263 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002264 if (numStreamsOnEncoder > 0)
2265 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2266 else
2267 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002268 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002270 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002271 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002272 LOGH("Input stream configured, reprocess config");
2273 } else {
2274 //for non zsl streams find out the format
2275 switch (newStream->format) {
2276 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2277 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002278 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002279 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2280 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2281 /* add additional features to pp feature mask */
2282 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2283 mStreamConfigInfo.num_streams);
2284
2285 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2286 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2287 CAM_STREAM_TYPE_VIDEO;
2288 if (m_bTnrEnabled && m_bTnrVideo) {
2289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2290 CAM_QCOM_FEATURE_CPP_TNR;
2291 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2292 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2293 ~CAM_QCOM_FEATURE_CDS;
2294 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2296 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2297 CAM_QTI_FEATURE_PPEISCORE;
2298 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002299 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002300 } else {
2301 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2302 CAM_STREAM_TYPE_PREVIEW;
2303 if (m_bTnrEnabled && m_bTnrPreview) {
2304 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2305 CAM_QCOM_FEATURE_CPP_TNR;
2306 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2308 ~CAM_QCOM_FEATURE_CDS;
2309 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002310 if(!m_bSwTnrPreview) {
2311 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2312 ~CAM_QTI_FEATURE_SW_TNR;
2313 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002314 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 padding_info.width_padding = mSurfaceStridePadding;
2316 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002317 previewSize.width = (int32_t)newStream->width;
2318 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002319 }
2320 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2321 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2322 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2323 newStream->height;
2324 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2325 newStream->width;
2326 }
2327 }
2328 break;
2329 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002330 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002331 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2332 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2333 if (bUseCommonFeatureMask)
2334 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2335 commonFeatureMask;
2336 else
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2338 CAM_QCOM_FEATURE_NONE;
2339 } else {
2340 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2341 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2342 }
2343 break;
2344 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002345 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002346 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2347 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2348 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2349 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2350 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002351 /* Remove rotation if it is not supported
2352 for 4K LiveVideo snapshot case (online processing) */
2353 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2354 CAM_QCOM_FEATURE_ROTATION)) {
2355 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2356 &= ~CAM_QCOM_FEATURE_ROTATION;
2357 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002358 } else {
2359 if (bUseCommonFeatureMask &&
2360 isOnEncoder(maxViewfinderSize, newStream->width,
2361 newStream->height)) {
2362 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2363 } else {
2364 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2365 }
2366 }
2367 if (isZsl) {
2368 if (zslStream) {
2369 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2370 (int32_t)zslStream->width;
2371 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2372 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002373 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2374 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002375 } else {
2376 LOGE("Error, No ZSL stream identified");
2377 pthread_mutex_unlock(&mMutex);
2378 return -EINVAL;
2379 }
2380 } else if (m_bIs4KVideo) {
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2382 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2383 } else if (bYuv888OverrideJpeg) {
2384 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2385 (int32_t)largeYuv888Size.width;
2386 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2387 (int32_t)largeYuv888Size.height;
2388 }
2389 break;
2390 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2391 case HAL_PIXEL_FORMAT_RAW16:
2392 case HAL_PIXEL_FORMAT_RAW10:
2393 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2394 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2395 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002396 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2397 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2398 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2399 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2400 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2401 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2402 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2403 gCamCapability[mCameraId]->dt[mPDIndex];
2404 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2405 gCamCapability[mCameraId]->vc[mPDIndex];
2406 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 break;
2408 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002409 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002410 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2411 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2412 break;
2413 }
2414 }
2415
2416 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2417 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2418 gCamCapability[mCameraId]->color_arrangement);
2419
2420 if (newStream->priv == NULL) {
2421 //New stream, construct channel
2422 switch (newStream->stream_type) {
2423 case CAMERA3_STREAM_INPUT:
2424 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2425 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2426 break;
2427 case CAMERA3_STREAM_BIDIRECTIONAL:
2428 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2429 GRALLOC_USAGE_HW_CAMERA_WRITE;
2430 break;
2431 case CAMERA3_STREAM_OUTPUT:
2432 /* For video encoding streams, set the read/write-rarely
2433 * flags so that the buffers may be allocated un-cached */
2434 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2435 newStream->usage |=
2436 (GRALLOC_USAGE_SW_READ_RARELY |
2437 GRALLOC_USAGE_SW_WRITE_RARELY |
2438 GRALLOC_USAGE_HW_CAMERA_WRITE);
2439 else if (IS_USAGE_ZSL(newStream->usage))
2440 {
2441 LOGD("ZSL usage flag skipping");
2442 }
2443 else if (newStream == zslStream
2444 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2445 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2446 } else
2447 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2448 break;
2449 default:
2450 LOGE("Invalid stream_type %d", newStream->stream_type);
2451 break;
2452 }
2453
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002454 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002455 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2456 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2457 QCamera3ProcessingChannel *channel = NULL;
2458 switch (newStream->format) {
2459 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2460 if ((newStream->usage &
2461 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2462 (streamList->operation_mode ==
2463 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2464 ) {
2465 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2466 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002467 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002468 this,
2469 newStream,
2470 (cam_stream_type_t)
2471 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2473 mMetadataChannel,
2474 0); //heap buffers are not required for HFR video channel
2475 if (channel == NULL) {
2476 LOGE("allocation of channel failed");
2477 pthread_mutex_unlock(&mMutex);
2478 return -ENOMEM;
2479 }
2480 //channel->getNumBuffers() will return 0 here so use
2481 //MAX_INFLIGHT_HFR_REQUESTS
2482 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2483 newStream->priv = channel;
2484 LOGI("num video buffers in HFR mode: %d",
2485 MAX_INFLIGHT_HFR_REQUESTS);
2486 } else {
2487 /* Copy stream contents in the HFR preview-only case to create a
2488 * dummy batch channel so that sensor streaming stays in
2489 * HFR mode */
2490 if (!m_bIsVideo && (streamList->operation_mode ==
2491 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2492 mDummyBatchStream = *newStream;
2493 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 int bufferCount = MAX_INFLIGHT_REQUESTS;
2495 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2496 CAM_STREAM_TYPE_VIDEO) {
2497 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2498 bufferCount = MAX_VIDEO_BUFFERS;
2499 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002500 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2501 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002502 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002503 this,
2504 newStream,
2505 (cam_stream_type_t)
2506 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2507 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2508 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 if (channel == NULL) {
2511 LOGE("allocation of channel failed");
2512 pthread_mutex_unlock(&mMutex);
2513 return -ENOMEM;
2514 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* disable UBWC for preview, though supported,
2516 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002517 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002518 (previewSize.width == (int32_t)videoWidth)&&
2519 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002520 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002521 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002522 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002523 newStream->max_buffers = channel->getNumBuffers();
2524 newStream->priv = channel;
2525 }
2526 break;
2527 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2528 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2529 mChannelHandle,
2530 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002531 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002532 this,
2533 newStream,
2534 (cam_stream_type_t)
2535 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2536 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2537 mMetadataChannel);
2538 if (channel == NULL) {
2539 LOGE("allocation of YUV channel failed");
2540 pthread_mutex_unlock(&mMutex);
2541 return -ENOMEM;
2542 }
2543 newStream->max_buffers = channel->getNumBuffers();
2544 newStream->priv = channel;
2545 break;
2546 }
2547 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2548 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002549 case HAL_PIXEL_FORMAT_RAW10: {
2550 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2551 (HAL_DATASPACE_DEPTH != newStream->data_space))
2552 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002553 mRawChannel = new QCamera3RawChannel(
2554 mCameraHandle->camera_handle, mChannelHandle,
2555 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002556 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 this, newStream,
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002559 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002560 if (mRawChannel == NULL) {
2561 LOGE("allocation of raw channel failed");
2562 pthread_mutex_unlock(&mMutex);
2563 return -ENOMEM;
2564 }
2565 newStream->max_buffers = mRawChannel->getNumBuffers();
2566 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2567 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002568 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002569 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002570 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2571 mDepthChannel = new QCamera3DepthChannel(
2572 mCameraHandle->camera_handle, mChannelHandle,
2573 mCameraHandle->ops, NULL, NULL, &padding_info,
2574 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2575 mMetadataChannel);
2576 if (NULL == mDepthChannel) {
2577 LOGE("Allocation of depth channel failed");
2578 pthread_mutex_unlock(&mMutex);
2579 return NO_MEMORY;
2580 }
2581 newStream->priv = mDepthChannel;
2582 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2583 } else {
2584 // Max live snapshot inflight buffer is 1. This is to mitigate
2585 // frame drop issues for video snapshot. The more buffers being
2586 // allocated, the more frame drops there are.
2587 mPictureChannel = new QCamera3PicChannel(
2588 mCameraHandle->camera_handle, mChannelHandle,
2589 mCameraHandle->ops, captureResultCb,
2590 setBufferErrorStatus, &padding_info, this, newStream,
2591 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2592 m_bIs4KVideo, isZsl, mMetadataChannel,
2593 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2594 if (mPictureChannel == NULL) {
2595 LOGE("allocation of channel failed");
2596 pthread_mutex_unlock(&mMutex);
2597 return -ENOMEM;
2598 }
2599 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2600 newStream->max_buffers = mPictureChannel->getNumBuffers();
2601 mPictureChannel->overrideYuvSize(
2602 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2603 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 break;
2606
2607 default:
2608 LOGE("not a supported format 0x%x", newStream->format);
2609 break;
2610 }
2611 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2612 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2613 } else {
2614 LOGE("Error, Unknown stream type");
2615 pthread_mutex_unlock(&mMutex);
2616 return -EINVAL;
2617 }
2618
2619 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002620 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2621 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002622 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002623 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2625 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2626 }
2627 }
2628
2629 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2630 it != mStreamInfo.end(); it++) {
2631 if ((*it)->stream == newStream) {
2632 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2633 break;
2634 }
2635 }
2636 } else {
2637 // Channel already exists for this stream
2638 // Do nothing for now
2639 }
2640 padding_info = gCamCapability[mCameraId]->padding_info;
2641
Emilian Peev7650c122017-01-19 08:24:33 -08002642 /* Do not add entries for input & depth streams in the meta stream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 * since there is no real stream associated with them
2644 */
Emilian Peev7650c122017-01-19 08:24:33 -08002645 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002646 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2647 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002648 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002649 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002650 }
2651
Binhao Lincdb362a2017-04-20 13:31:54 -07002652 // By default, preview stream TNR is disabled.
2653 // Enable TNR for the preview stream if all conditions below are satisfied:
2654 // 1. resolution <= 1080p.
2655 // 2. preview resolution == video resolution.
2656 // 3. video stream TNR is enabled.
2657 // 4. EIS2.0
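    // For example (illustrative sizes): a 1920x1080 preview paired with a 1920x1080
    // TNR-enabled video stream under EIS 2.0 gets CAM_QCOM_FEATURE_CPP_TNR (and CDS
    // cleared, since TNR and CDS are mutually exclusive); a 4K video stream, or a
    // preview whose size differs from the video size, is left unchanged.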
2658 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2659 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2660 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2661 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2662 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2663 video_stream->width == preview_stream->width &&
2664 video_stream->height == preview_stream->height) {
2665 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2666 CAM_QCOM_FEATURE_CPP_TNR;
2667 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2668 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2669 ~CAM_QCOM_FEATURE_CDS;
2670 }
2671 }
2672
Thierry Strudel2896d122017-02-23 19:18:03 -08002673 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2674 onlyRaw = false;
2675 }
2676
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002677 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002678 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002679 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002680 cam_analysis_info_t analysisInfo;
2681 int32_t ret = NO_ERROR;
2682 ret = mCommon.getAnalysisInfo(
2683 FALSE,
2684 analysisFeatureMask,
2685 &analysisInfo);
2686 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002687 cam_color_filter_arrangement_t analysis_color_arrangement =
2688 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2689 CAM_FILTER_ARRANGEMENT_Y :
2690 gCamCapability[mCameraId]->color_arrangement);
2691 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2692 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002693 cam_dimension_t analysisDim;
2694 analysisDim = mCommon.getMatchingDimension(previewSize,
2695 analysisInfo.analysis_recommended_res);
2696
2697 mAnalysisChannel = new QCamera3SupportChannel(
2698 mCameraHandle->camera_handle,
2699 mChannelHandle,
2700 mCameraHandle->ops,
2701 &analysisInfo.analysis_padding_info,
2702 analysisFeatureMask,
2703 CAM_STREAM_TYPE_ANALYSIS,
2704 &analysisDim,
2705 (analysisInfo.analysis_format
2706 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2707 : CAM_FORMAT_YUV_420_NV21),
2708 analysisInfo.hw_analysis_supported,
2709 gCamCapability[mCameraId]->color_arrangement,
2710 this,
2711 0); // force buffer count to 0
2712 } else {
2713 LOGW("getAnalysisInfo failed, ret = %d", ret);
2714 }
2715 if (!mAnalysisChannel) {
2716 LOGW("Analysis channel cannot be created");
2717 }
2718 }
2719
Thierry Strudel3d639192016-09-09 11:52:26 -07002720 //RAW DUMP channel
2721 if (mEnableRawDump && isRawStreamRequested == false){
2722 cam_dimension_t rawDumpSize;
2723 rawDumpSize = getMaxRawSize(mCameraId);
2724 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2725 setPAAFSupport(rawDumpFeatureMask,
2726 CAM_STREAM_TYPE_RAW,
2727 gCamCapability[mCameraId]->color_arrangement);
2728 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2729 mChannelHandle,
2730 mCameraHandle->ops,
2731 rawDumpSize,
2732 &padding_info,
2733 this, rawDumpFeatureMask);
2734 if (!mRawDumpChannel) {
2735 LOGE("Raw Dump channel cannot be created");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 }
2740
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 if (mAnalysisChannel) {
2742 cam_analysis_info_t analysisInfo;
2743 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2744 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2745 CAM_STREAM_TYPE_ANALYSIS;
2746 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2747 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002748 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002749 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2750 &analysisInfo);
2751 if (rc != NO_ERROR) {
2752 LOGE("getAnalysisInfo failed, ret = %d", rc);
2753 pthread_mutex_unlock(&mMutex);
2754 return rc;
2755 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002756 cam_color_filter_arrangement_t analysis_color_arrangement =
2757 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2758 CAM_FILTER_ARRANGEMENT_Y :
2759 gCamCapability[mCameraId]->color_arrangement);
2760 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2761 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2762 analysis_color_arrangement);
2763
Thierry Strudel3d639192016-09-09 11:52:26 -07002764 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002765 mCommon.getMatchingDimension(previewSize,
2766 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002767 mStreamConfigInfo.num_streams++;
2768 }
2769
Thierry Strudel2896d122017-02-23 19:18:03 -08002770 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002771 cam_analysis_info_t supportInfo;
2772 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2773 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2774 setPAAFSupport(callbackFeatureMask,
2775 CAM_STREAM_TYPE_CALLBACK,
2776 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002777 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002778 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002779 if (ret != NO_ERROR) {
2780 /* Ignore the error for Mono camera
2781 * because the PAAF bit mask is only set
2782 * for CAM_STREAM_TYPE_ANALYSIS stream type
2783 */
2784 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2785 LOGW("getAnalysisInfo failed, ret = %d", ret);
2786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002787 }
2788 mSupportChannel = new QCamera3SupportChannel(
2789 mCameraHandle->camera_handle,
2790 mChannelHandle,
2791 mCameraHandle->ops,
2792 &gCamCapability[mCameraId]->padding_info,
2793 callbackFeatureMask,
2794 CAM_STREAM_TYPE_CALLBACK,
2795 &QCamera3SupportChannel::kDim,
2796 CAM_FORMAT_YUV_420_NV21,
2797 supportInfo.hw_analysis_supported,
2798 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002799 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002800 if (!mSupportChannel) {
2801 LOGE("dummy channel cannot be created");
2802 pthread_mutex_unlock(&mMutex);
2803 return -ENOMEM;
2804 }
2805 }
2806
2807 if (mSupportChannel) {
2808 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2809 QCamera3SupportChannel::kDim;
2810 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2811 CAM_STREAM_TYPE_CALLBACK;
2812 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2813 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2814 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2815 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2816 gCamCapability[mCameraId]->color_arrangement);
2817 mStreamConfigInfo.num_streams++;
2818 }
2819
2820 if (mRawDumpChannel) {
2821 cam_dimension_t rawSize;
2822 rawSize = getMaxRawSize(mCameraId);
2823 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2824 rawSize;
2825 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2826 CAM_STREAM_TYPE_RAW;
2827 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2828 CAM_QCOM_FEATURE_NONE;
2829 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2830 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2831 gCamCapability[mCameraId]->color_arrangement);
2832 mStreamConfigInfo.num_streams++;
2833 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002834
2835 if (mHdrPlusRawSrcChannel) {
2836 cam_dimension_t rawSize;
2837 rawSize = getMaxRawSize(mCameraId);
2838 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2839 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2840 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2841 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2842 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2843 gCamCapability[mCameraId]->color_arrangement);
2844 mStreamConfigInfo.num_streams++;
2845 }
2846
Thierry Strudel3d639192016-09-09 11:52:26 -07002847 /* In HFR mode, if no video stream is added, create a dummy channel so that
2848 * the ISP can run in batch mode even for the preview-only case. This channel is
2849 * never 'start'ed (no stream-on); it is only 'initialized' */
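    /* Illustrative scenario: a constrained high-speed (e.g. 120 fps) session configured
     * with only a preview stream still needs a video-type stream on the backend for the
     * ISP to program its HFR batch; mDummyBatchStream fills that role and is never
     * streamed on. */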
2850 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2851 !m_bIsVideo) {
2852 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2853 setPAAFSupport(dummyFeatureMask,
2854 CAM_STREAM_TYPE_VIDEO,
2855 gCamCapability[mCameraId]->color_arrangement);
2856 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2857 mChannelHandle,
2858 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002859 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002860 this,
2861 &mDummyBatchStream,
2862 CAM_STREAM_TYPE_VIDEO,
2863 dummyFeatureMask,
2864 mMetadataChannel);
2865 if (NULL == mDummyBatchChannel) {
2866 LOGE("creation of mDummyBatchChannel failed."
2867 "Preview will use non-hfr sensor mode ");
2868 }
2869 }
2870 if (mDummyBatchChannel) {
2871 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2872 mDummyBatchStream.width;
2873 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2874 mDummyBatchStream.height;
2875 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2876 CAM_STREAM_TYPE_VIDEO;
2877 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2878 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2879 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2881 gCamCapability[mCameraId]->color_arrangement);
2882 mStreamConfigInfo.num_streams++;
2883 }
2884
2885 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2886 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002887 m_bIs4KVideo ? 0 :
2888 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
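    // Buffer budget note (assumption on intent): 4K video keeps the extra backend queue at 0
    // to limit memory pressure, EIS 3.0 uses the deeper MAX_VIDEO_BUFFERS queue since it
    // buffers additional frames for stabilization, and all other cases use MAX_INFLIGHT_REQUESTS.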
Thierry Strudel3d639192016-09-09 11:52:26 -07002889
2890 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2891 for (pendingRequestIterator i = mPendingRequestsList.begin();
2892 i != mPendingRequestsList.end();) {
2893 i = erasePendingRequest(i);
2894 }
2895 mPendingFrameDropList.clear();
2896 // Initialize/Reset the pending buffers list
2897 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2898 req.mPendingBufferList.clear();
2899 }
2900 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2901
Thierry Strudel3d639192016-09-09 11:52:26 -07002902 mCurJpegMeta.clear();
2903 //Get min frame duration for this streams configuration
2904 deriveMinFrameDuration();
2905
Chien-Yu Chenee335912017-02-09 17:53:20 -08002906 mFirstPreviewIntentSeen = false;
2907
2908 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002909 {
2910 Mutex::Autolock l(gHdrPlusClientLock);
2911 disableHdrPlusModeLocked();
2912 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002913
Thierry Strudel3d639192016-09-09 11:52:26 -07002914 // Update state
2915 mState = CONFIGURED;
2916
2917 pthread_mutex_unlock(&mMutex);
2918
2919 return rc;
2920}
2921
2922/*===========================================================================
2923 * FUNCTION : validateCaptureRequest
2924 *
2925 * DESCRIPTION: validate a capture request from camera service
2926 *
2927 * PARAMETERS :
2928 * @request : request from framework to process
2929 *
2930 * RETURN :
2931 *
2932 *==========================================================================*/
2933int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002934 camera3_capture_request_t *request,
2935 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002936{
2937 ssize_t idx = 0;
2938 const camera3_stream_buffer_t *b;
2939 CameraMetadata meta;
2940
2941 /* Sanity check the request */
2942 if (request == NULL) {
2943 LOGE("NULL capture request");
2944 return BAD_VALUE;
2945 }
2946
2947 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2948 /*settings cannot be null for the first request*/
2949 return BAD_VALUE;
2950 }
2951
2952 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002953 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2954 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 LOGE("Request %d: No output buffers provided!",
2956 frameNumber);
2957 return BAD_VALUE;
2958 }
2959 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2960 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2961 request->num_output_buffers, MAX_NUM_STREAMS);
2962 return BAD_VALUE;
2963 }
2964 if (request->input_buffer != NULL) {
2965 b = request->input_buffer;
2966 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2967 LOGE("Request %d: Buffer %ld: Status not OK!",
2968 frameNumber, (long)idx);
2969 return BAD_VALUE;
2970 }
2971 if (b->release_fence != -1) {
2972 LOGE("Request %d: Buffer %ld: Has a release fence!",
2973 frameNumber, (long)idx);
2974 return BAD_VALUE;
2975 }
2976 if (b->buffer == NULL) {
2977 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2978 frameNumber, (long)idx);
2979 return BAD_VALUE;
2980 }
2981 }
2982
2983 // Validate all buffers
2984 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002985 if (b == NULL) {
2986 return BAD_VALUE;
2987 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002988 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002989 QCamera3ProcessingChannel *channel =
2990 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2991 if (channel == NULL) {
2992 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2993 frameNumber, (long)idx);
2994 return BAD_VALUE;
2995 }
2996 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2997 LOGE("Request %d: Buffer %ld: Status not OK!",
2998 frameNumber, (long)idx);
2999 return BAD_VALUE;
3000 }
3001 if (b->release_fence != -1) {
3002 LOGE("Request %d: Buffer %ld: Has a release fence!",
3003 frameNumber, (long)idx);
3004 return BAD_VALUE;
3005 }
3006 if (b->buffer == NULL) {
3007 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3008 frameNumber, (long)idx);
3009 return BAD_VALUE;
3010 }
3011 if (*(b->buffer) == NULL) {
3012 LOGE("Request %d: Buffer %ld: NULL private handle!",
3013 frameNumber, (long)idx);
3014 return BAD_VALUE;
3015 }
3016 idx++;
3017 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003018 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003019 return NO_ERROR;
3020}
3021
3022/*===========================================================================
3023 * FUNCTION : deriveMinFrameDuration
3024 *
3025 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3026 * on currently configured streams.
3027 *
3028 * PARAMETERS : NONE
3029 *
3030 * RETURN : NONE
3031 *
3032 *==========================================================================*/
3033void QCamera3HardwareInterface::deriveMinFrameDuration()
3034{
3035 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3036
3037 maxJpegDim = 0;
3038 maxProcessedDim = 0;
3039 maxRawDim = 0;
3040
3041 // Figure out maximum jpeg, processed, and raw dimensions
3042 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3043 it != mStreamInfo.end(); it++) {
3044
3045 // Input stream doesn't have valid stream_type
3046 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3047 continue;
3048
3049 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3050 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3051 if (dimension > maxJpegDim)
3052 maxJpegDim = dimension;
3053 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3054 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3055 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3056 if (dimension > maxRawDim)
3057 maxRawDim = dimension;
3058 } else {
3059 if (dimension > maxProcessedDim)
3060 maxProcessedDim = dimension;
3061 }
3062 }
3063
3064 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3065 MAX_SIZES_CNT);
3066
3067 //Assume all jpeg dimensions are in processed dimensions.
3068 if (maxJpegDim > maxProcessedDim)
3069 maxProcessedDim = maxJpegDim;
3070 //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3071 if (maxProcessedDim > maxRawDim) {
3072 maxRawDim = INT32_MAX;
3073
3074 for (size_t i = 0; i < count; i++) {
3075 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3076 gCamCapability[mCameraId]->raw_dim[i].height;
3077 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3078 maxRawDim = dimension;
3079 }
3080 }
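    // Worked example (hypothetical sizes): with a 12 MP BLOB stream and a 2 MP preview,
    // maxProcessedDim becomes 12 MP; if the sensor advertises 10 MP and 13 MP raw sizes,
    // the loop above selects 13 MP as the smallest raw dimension covering the processed size.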
3081
3082 //Find minimum durations for processed, jpeg, and raw
3083 for (size_t i = 0; i < count; i++) {
3084 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3085 gCamCapability[mCameraId]->raw_dim[i].height) {
3086 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3087 break;
3088 }
3089 }
3090 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3091 for (size_t i = 0; i < count; i++) {
3092 if (maxProcessedDim ==
3093 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3094 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3095 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3096 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3097 break;
3098 }
3099 }
3100}
3101
3102/*===========================================================================
3103 * FUNCTION : getMinFrameDuration
3104 *
3105 * DESCRIPTION: get the minimum frame duration based on the per-stream minimum frame
3106 * durations derived for the current configuration and on the current request.
3107 *
3108 * PARAMETERS : @request: request sent by the framework
3109 *
3110 * RETURN : minimum frame duration for a particular request
3111 *
3112 *==========================================================================*/
3113int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3114{
3115 bool hasJpegStream = false;
3116 bool hasRawStream = false;
3117 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3118 const camera3_stream_t *stream = request->output_buffers[i].stream;
3119 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3120 hasJpegStream = true;
3121 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3122 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3123 stream->format == HAL_PIXEL_FORMAT_RAW16)
3124 hasRawStream = true;
3125 }
3126
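    // The per-request floor is the largest of the applicable per-stream minimums. For example
    // (hypothetical values): with a 33 ms processed minimum and a 50 ms JPEG minimum, a request
    // that includes a BLOB buffer cannot repeat faster than every 50 ms.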
3127 if (!hasJpegStream)
3128 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3129 else
3130 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3131}
3132
3133/*===========================================================================
3134 * FUNCTION : handleBuffersDuringFlushLock
3135 *
3136 * DESCRIPTION: Account for buffers returned from back-end during flush
3137 * This function is executed while mMutex is held by the caller.
3138 *
3139 * PARAMETERS :
3140 * @buffer: image buffer for the callback
3141 *
3142 * RETURN :
3143 *==========================================================================*/
3144void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3145{
3146 bool buffer_found = false;
3147 for (List<PendingBuffersInRequest>::iterator req =
3148 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3149 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3150 for (List<PendingBufferInfo>::iterator i =
3151 req->mPendingBufferList.begin();
3152 i != req->mPendingBufferList.end(); i++) {
3153 if (i->buffer == buffer->buffer) {
3154 mPendingBuffersMap.numPendingBufsAtFlush--;
3155 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3156 buffer->buffer, req->frame_number,
3157 mPendingBuffersMap.numPendingBufsAtFlush);
3158 buffer_found = true;
3159 break;
3160 }
3161 }
3162 if (buffer_found) {
3163 break;
3164 }
3165 }
3166 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3167 //signal the flush()
3168 LOGD("All buffers returned to HAL. Continue flush");
3169 pthread_cond_signal(&mBuffersCond);
3170 }
3171}
3172
Thierry Strudel3d639192016-09-09 11:52:26 -07003173/*===========================================================================
3174 * FUNCTION : handleBatchMetadata
3175 *
3176 * DESCRIPTION: Handles metadata buffer callback in batch mode
3177 *
3178 * PARAMETERS : @metadata_buf: metadata buffer
3179 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3180 * the meta buf in this method
3181 *
3182 * RETURN :
3183 *
3184 *==========================================================================*/
3185void QCamera3HardwareInterface::handleBatchMetadata(
3186 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3187{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003188 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003189
3190 if (NULL == metadata_buf) {
3191 LOGE("metadata_buf is NULL");
3192 return;
3193 }
3194 /* In batch mode, the metadata will contain the frame number and timestamp of
3195 * the last frame in the batch. E.g. a batch containing buffers from requests
3196 * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3197 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3198 * multiple process_capture_results */
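    /* Illustrative walk-through: if the batch metadata reports last_frame_number = 8 and
     * mPendingBatchMap maps it back to first_frame_number = 5, then frameNumDiff is 4 and
     * the loop below emits results for frames 5, 6, 7 and 8 with interpolated frame
     * numbers and timestamps. */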
3199 metadata_buffer_t *metadata =
3200 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3201 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3202 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3203 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3204 uint32_t frame_number = 0, urgent_frame_number = 0;
3205 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3206 bool invalid_metadata = false;
3207 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3208 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003209 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003210
3211 int32_t *p_frame_number_valid =
3212 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3213 uint32_t *p_frame_number =
3214 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3215 int64_t *p_capture_time =
3216 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3217 int32_t *p_urgent_frame_number_valid =
3218 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3219 uint32_t *p_urgent_frame_number =
3220 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3221
3222 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3223 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3224 (NULL == p_urgent_frame_number)) {
3225 LOGE("Invalid metadata");
3226 invalid_metadata = true;
3227 } else {
3228 frame_number_valid = *p_frame_number_valid;
3229 last_frame_number = *p_frame_number;
3230 last_frame_capture_time = *p_capture_time;
3231 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3232 last_urgent_frame_number = *p_urgent_frame_number;
3233 }
3234
3235 /* In batch mode, when no video buffers are requested, set_parms are sent
3236 * for every capture_request. The difference between consecutive urgent
3237 * frame numbers and frame numbers should be used to interpolate the
3238 * corresponding frame numbers and time stamps */
3239 pthread_mutex_lock(&mMutex);
3240 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003241 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3242 if(idx < 0) {
3243 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3244 last_urgent_frame_number);
3245 mState = ERROR;
3246 pthread_mutex_unlock(&mMutex);
3247 return;
3248 }
3249 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003250 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3251 first_urgent_frame_number;
3252
3253 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3254 urgent_frame_number_valid,
3255 first_urgent_frame_number, last_urgent_frame_number);
3256 }
3257
3258 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003259 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3260 if(idx < 0) {
3261 LOGE("Invalid frame number received: %d. Irrecoverable error",
3262 last_frame_number);
3263 mState = ERROR;
3264 pthread_mutex_unlock(&mMutex);
3265 return;
3266 }
3267 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003268 frameNumDiff = last_frame_number + 1 -
3269 first_frame_number;
3270 mPendingBatchMap.removeItem(last_frame_number);
3271
3272 LOGD("frm: valid: %d frm_num: %d - %d",
3273 frame_number_valid,
3274 first_frame_number, last_frame_number);
3275
3276 }
3277 pthread_mutex_unlock(&mMutex);
3278
3279 if (urgent_frame_number_valid || frame_number_valid) {
3280 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3281 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3282 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3283 urgentFrameNumDiff, last_urgent_frame_number);
3284 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3285 LOGE("frameNumDiff: %d frameNum: %d",
3286 frameNumDiff, last_frame_number);
3287 }
3288
3289 for (size_t i = 0; i < loopCount; i++) {
3290 /* handleMetadataWithLock is called even for invalid_metadata for
3291 * pipeline depth calculation */
3292 if (!invalid_metadata) {
3293 /* Infer frame number. Batch metadata contains frame number of the
3294 * last frame */
3295 if (urgent_frame_number_valid) {
3296 if (i < urgentFrameNumDiff) {
3297 urgent_frame_number =
3298 first_urgent_frame_number + i;
3299 LOGD("inferred urgent frame_number: %d",
3300 urgent_frame_number);
3301 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3302 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3303 } else {
3304 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3305 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3306 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3307 }
3308 }
3309
3310 /* Infer frame number. Batch metadata contains frame number of the
3311 * last frame */
3312 if (frame_number_valid) {
3313 if (i < frameNumDiff) {
3314 frame_number = first_frame_number + i;
3315 LOGD("inferred frame_number: %d", frame_number);
3316 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3317 CAM_INTF_META_FRAME_NUMBER, frame_number);
3318 } else {
3319 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3320 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3321 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3322 }
3323 }
3324
3325 if (last_frame_capture_time) {
3326 //Infer timestamp
3327 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003328 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003329 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003330 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003331 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3332 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3333 LOGD("batch capture_time: %lld, capture_time: %lld",
3334 last_frame_capture_time, capture_time);
3335 }
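                // Timestamp spacing example (hypothetical values): with mHFRVideoFps = 120 and
                // loopCount = 4, consecutive inferred timestamps are NSEC_PER_SEC / 120
                // (~8.33 ms) apart and the last one lands exactly on last_frame_capture_time.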
3336 }
3337 pthread_mutex_lock(&mMutex);
3338 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003339 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003340 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3341 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003342 &is_metabuf_queued /* whether the meta buffer was queued */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003343 pthread_mutex_unlock(&mMutex);
3344 }
3345
3346 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003347 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003348 mMetadataChannel->bufDone(metadata_buf);
3349 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003350 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003351 }
3352}
3353
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003354void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3355 camera3_error_msg_code_t errorCode)
3356{
3357 camera3_notify_msg_t notify_msg;
3358 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3359 notify_msg.type = CAMERA3_MSG_ERROR;
3360 notify_msg.message.error.error_code = errorCode;
3361 notify_msg.message.error.error_stream = NULL;
3362 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003363 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003364
3365 return;
3366}
Thierry Strudel3d639192016-09-09 11:52:26 -07003367/*===========================================================================
3368 * FUNCTION : handleMetadataWithLock
3369 *
3370 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3371 *
3372 * PARAMETERS : @metadata_buf: metadata buffer
3373 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3374 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003375 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3376 * last urgent metadata in a batch. Always true for non-batch mode
3377 * @lastMetadataInBatch: Boolean to indicate whether this is the
3378 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003379 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3380 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003381 *
3382 * RETURN :
3383 *
3384 *==========================================================================*/
3385void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003386 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003387 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3388 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003389{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003390 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003391 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3392 //during flush do not send metadata from this thread
3393 LOGD("not sending metadata during flush or when mState is error");
3394 if (free_and_bufdone_meta_buf) {
3395 mMetadataChannel->bufDone(metadata_buf);
3396 free(metadata_buf);
3397 }
3398 return;
3399 }
3400
3401 //not in flush
3402 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3403 int32_t frame_number_valid, urgent_frame_number_valid;
3404 uint32_t frame_number, urgent_frame_number;
3405 int64_t capture_time;
3406 nsecs_t currentSysTime;
3407
3408 int32_t *p_frame_number_valid =
3409 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3410 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3411 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3412 int32_t *p_urgent_frame_number_valid =
3413 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3414 uint32_t *p_urgent_frame_number =
3415 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3416 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3417 metadata) {
3418 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3419 *p_frame_number_valid, *p_frame_number);
3420 }
3421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003422 camera_metadata_t *resultMetadata = nullptr;
3423
Thierry Strudel3d639192016-09-09 11:52:26 -07003424 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3425 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3426 LOGE("Invalid metadata");
3427 if (free_and_bufdone_meta_buf) {
3428 mMetadataChannel->bufDone(metadata_buf);
3429 free(metadata_buf);
3430 }
3431 goto done_metadata;
3432 }
3433 frame_number_valid = *p_frame_number_valid;
3434 frame_number = *p_frame_number;
3435 capture_time = *p_capture_time;
3436 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3437 urgent_frame_number = *p_urgent_frame_number;
3438 currentSysTime = systemTime(CLOCK_MONOTONIC);
3439
3440 // Detect if buffers from any requests are overdue
3441 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003442 int64_t timeout;
3443 {
3444 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3445 // If there is a pending HDR+ request, the following requests may be blocked until the
3446 // HDR+ request is done. So allow a longer timeout.
3447 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3448 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3449 }
3450
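        // Compare the request's age against the timeout; s2ns() converts the timeout from
        // seconds to nanoseconds to match the CLOCK_MONOTONIC timestamps used above.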
3451 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003452 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003453 assert(missed.stream->priv);
3454 if (missed.stream->priv) {
3455 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3456 assert(ch->mStreams[0]);
3457 if (ch->mStreams[0]) {
3458 LOGE("Cancel missing frame = %d, buffer = %p,"
3459 "stream type = %d, stream format = %d",
3460 req.frame_number, missed.buffer,
3461 ch->mStreams[0]->getMyType(), missed.stream->format);
3462 ch->timeoutFrame(req.frame_number);
3463 }
3464 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003465 }
3466 }
3467 }
3468 //Partial result on process_capture_result for timestamp
3469 if (urgent_frame_number_valid) {
3470 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3471 urgent_frame_number, capture_time);
3472
3473 //Received an urgent frame number, handle it
3474 //using partial results
3475 for (pendingRequestIterator i =
3476 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3477 LOGD("Iterator Frame = %d urgent frame = %d",
3478 i->frame_number, urgent_frame_number);
3479
3480 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3481 (i->partial_result_cnt == 0)) {
3482 LOGE("Error: HAL missed urgent metadata for frame number %d",
3483 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003484 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003485 }
3486
3487 if (i->frame_number == urgent_frame_number &&
3488 i->bUrgentReceived == 0) {
3489
3490 camera3_capture_result_t result;
3491 memset(&result, 0, sizeof(camera3_capture_result_t));
3492
3493 i->partial_result_cnt++;
3494 i->bUrgentReceived = 1;
3495 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003496 result.result = translateCbUrgentMetadataToResultMetadata(
3497 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003498 // Populate metadata result
3499 result.frame_number = urgent_frame_number;
3500 result.num_output_buffers = 0;
3501 result.output_buffers = NULL;
3502 result.partial_result = i->partial_result_cnt;
3503
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003504 {
3505 Mutex::Autolock l(gHdrPlusClientLock);
3506 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3507 // Notify HDR+ client about the partial metadata.
3508 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3509 result.partial_result == PARTIAL_RESULT_COUNT);
3510 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003511 }
3512
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003513 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 LOGD("urgent frame_number = %u, capture_time = %lld",
3515 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003516 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3517 // Instant AEC settled for this frame.
3518 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3519 mInstantAECSettledFrameNumber = urgent_frame_number;
3520 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003521 free_camera_metadata((camera_metadata_t *)result.result);
3522 break;
3523 }
3524 }
3525 }
3526
3527 if (!frame_number_valid) {
3528 LOGD("Not a valid normal frame number, used as SOF only");
3529 if (free_and_bufdone_meta_buf) {
3530 mMetadataChannel->bufDone(metadata_buf);
3531 free(metadata_buf);
3532 }
3533 goto done_metadata;
3534 }
3535 LOGH("valid frame_number = %u, capture_time = %lld",
3536 frame_number, capture_time);
3537
Emilian Peev7650c122017-01-19 08:24:33 -08003538 if (metadata->is_depth_data_valid) {
3539 handleDepthDataLocked(metadata->depth_data, frame_number);
3540 }
3541
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003542 // Check whether any stream buffer corresponding to this frame is dropped.
3543 // If dropped, send ERROR_BUFFER for the corresponding stream.
3544 // Also, if instant AEC is enabled, drop frames until AEC is settled.
3545 for (auto & pendingRequest : mPendingRequestsList) {
3546 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3547 mInstantAECSettledFrameNumber)) {
3548 camera3_notify_msg_t notify_msg = {};
3549 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003550 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003551 QCamera3ProcessingChannel *channel =
3552 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003553 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 if (p_cam_frame_drop) {
3555 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003556 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 // Got the stream ID for drop frame.
3558 dropFrame = true;
3559 break;
3560 }
3561 }
3562 } else {
3563 // This is instant AEC case.
3564 // For instant AEC drop the stream untill AEC is settled.
3565 dropFrame = true;
3566 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003567
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003568 if (dropFrame) {
3569 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3570 if (p_cam_frame_drop) {
3571 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003572 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003573 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003574 } else {
3575 // For instant AEC, inform frame drop and frame number
3576 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3577 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003578 pendingRequest.frame_number, streamID,
3579 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003580 }
3581 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003582 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003583 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003584 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003585 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003586 if (p_cam_frame_drop) {
3587 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003588 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003589 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003590 } else {
3591 // For instant AEC, inform frame drop and frame number
3592 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3593 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003594 pendingRequest.frame_number, streamID,
3595 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003596 }
3597 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003598 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003599 PendingFrameDrop.stream_ID = streamID;
3600 // Add the Frame drop info to mPendingFrameDropList
3601 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003602 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003603 }
3604 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003605 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003606
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003607 for (auto & pendingRequest : mPendingRequestsList) {
3608 // Find the pending request with the frame number.
3609 if (pendingRequest.frame_number == frame_number) {
3610 // Update the sensor timestamp.
3611 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003612
Thierry Strudel3d639192016-09-09 11:52:26 -07003613
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003614 /* Set the timestamp in display metadata so that clients aware of
3615 private_handle, such as VT, can use these unmodified timestamps.
3616 The camera framework is unaware of this timestamp and cannot change it */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003617 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003618
Thierry Strudel3d639192016-09-09 11:52:26 -07003619 // Find channel requiring metadata, meaning internal offline postprocess
3620 // is needed.
3621 //TODO: for now, we don't support two streams requiring metadata at the same time.
3622 // (because we are not making copies, and the metadata buffer is not reference counted.)
3623 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003624 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3625 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 if (iter->need_metadata) {
3627 internalPproc = true;
3628 QCamera3ProcessingChannel *channel =
3629 (QCamera3ProcessingChannel *)iter->stream->priv;
3630 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003631 if(p_is_metabuf_queued != NULL) {
3632 *p_is_metabuf_queued = true;
3633 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003634 break;
3635 }
3636 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 for (auto itr = pendingRequest.internalRequestList.begin();
3638 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003639 if (itr->need_metadata) {
3640 internalPproc = true;
3641 QCamera3ProcessingChannel *channel =
3642 (QCamera3ProcessingChannel *)itr->stream->priv;
3643 channel->queueReprocMetadata(metadata_buf);
3644 break;
3645 }
3646 }
3647
Thierry Strudel54dc9782017-02-15 12:12:10 -08003648 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003649
3650 bool *enableZsl = nullptr;
3651 if (gExposeEnableZslKey) {
3652 enableZsl = &pendingRequest.enableZsl;
3653 }
3654
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003655 resultMetadata = translateFromHalMetadata(metadata,
3656 pendingRequest.timestamp, pendingRequest.request_id,
3657 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3658 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003659 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003660 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003662 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003664 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003667
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 if (pendingRequest.blob_request) {
3669 //Dump tuning metadata if enabled and available
3670 char prop[PROPERTY_VALUE_MAX];
3671 memset(prop, 0, sizeof(prop));
3672 property_get("persist.camera.dumpmetadata", prop, "0");
3673 int32_t enabled = atoi(prop);
3674 if (enabled && metadata->is_tuning_params_valid) {
3675 dumpMetadataToFile(metadata->tuning_params,
3676 mMetaFrameCount,
3677 enabled,
3678 "Snapshot",
3679 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003680 }
3681 }
3682
3683 if (!internalPproc) {
3684 LOGD("couldn't find need_metadata for this metadata");
3685 // Return metadata buffer
3686 if (free_and_bufdone_meta_buf) {
3687 mMetadataChannel->bufDone(metadata_buf);
3688 free(metadata_buf);
3689 }
3690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003691
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003692 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 }
3694 }
3695
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 // Try to send out shutter callbacks and capture results.
3697 handlePendingResultsWithLock(frame_number, resultMetadata);
3698 return;
3699
Thierry Strudel3d639192016-09-09 11:52:26 -07003700done_metadata:
3701 for (pendingRequestIterator i = mPendingRequestsList.begin();
3702 i != mPendingRequestsList.end() ;i++) {
3703 i->pipeline_depth++;
3704 }
3705 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3706 unblockRequestIfNecessary();
3707}
3708
3709/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003710 * FUNCTION : handleDepthDataLocked
3711 *
3712 * DESCRIPTION: Handles incoming depth data
3713 *
3714 * PARAMETERS : @depthData : Depth data
3715 * @frameNumber: Frame number of the incoming depth data
3716 *
3717 * RETURN :
3718 *
3719 *==========================================================================*/
3720void QCamera3HardwareInterface::handleDepthDataLocked(
3721 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3722 uint32_t currentFrameNumber;
3723 buffer_handle_t *depthBuffer;
3724
3725 if (nullptr == mDepthChannel) {
3726 LOGE("Depth channel not present!");
3727 return;
3728 }
3729
3730 camera3_stream_buffer_t resultBuffer =
3731 {.acquire_fence = -1,
3732 .release_fence = -1,
3733 .status = CAMERA3_BUFFER_STATUS_OK,
3734 .buffer = nullptr,
3735 .stream = mDepthChannel->getStream()};
3736 camera3_capture_result_t result =
3737 {.result = nullptr,
3738 .num_output_buffers = 1,
3739 .output_buffers = &resultBuffer,
3740 .partial_result = 0,
3741 .frame_number = 0};
3742
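    // Drain queued depth buffers in frame order: frames older than the incoming one are
    // returned with an error status (their depth data never arrived), the matching frame is
    // populated with depthData, and any newer frames stay queued for later metadata.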
3743 do {
3744 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3745 if (nullptr == depthBuffer) {
3746 break;
3747 }
3748
3749 result.frame_number = currentFrameNumber;
3750 resultBuffer.buffer = depthBuffer;
3751 if (currentFrameNumber == frameNumber) {
3752 int32_t rc = mDepthChannel->populateDepthData(depthData,
3753 frameNumber);
3754 if (NO_ERROR != rc) {
3755 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3756 } else {
3757 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3758 }
3759 } else if (currentFrameNumber > frameNumber) {
3760 break;
3761 } else {
3762 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3763 {{currentFrameNumber, mDepthChannel->getStream(),
3764 CAMERA3_MSG_ERROR_BUFFER}}};
3765 orchestrateNotify(&notify_msg);
3766
3767 LOGE("Depth buffer for frame number: %d is missing "
3768 "returning back!", currentFrameNumber);
3769 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3770 }
3771 mDepthChannel->unmapBuffer(currentFrameNumber);
3772
3773 orchestrateResult(&result);
3774 } while (currentFrameNumber < frameNumber);
3775}
3776
3777/*===========================================================================
3778 * FUNCTION : notifyErrorFoPendingDepthData
3779 *
3780 * DESCRIPTION: Returns error for any pending depth buffers
3781 *
3782 * PARAMETERS : depthCh - depth channel that needs to get flushed
3783 *
3784 * RETURN :
3785 *
3786 *==========================================================================*/
3787void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3788 QCamera3DepthChannel *depthCh) {
3789 uint32_t currentFrameNumber;
3790 buffer_handle_t *depthBuffer;
3791
3792 if (nullptr == depthCh) {
3793 return;
3794 }
3795
3796 camera3_notify_msg_t notify_msg =
3797 {.type = CAMERA3_MSG_ERROR,
3798 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3799 camera3_stream_buffer_t resultBuffer =
3800 {.acquire_fence = -1,
3801 .release_fence = -1,
3802 .buffer = nullptr,
3803 .stream = depthCh->getStream(),
3804 .status = CAMERA3_BUFFER_STATUS_ERROR};
3805 camera3_capture_result_t result =
3806 {.result = nullptr,
3807 .frame_number = 0,
3808 .num_output_buffers = 1,
3809 .partial_result = 0,
3810 .output_buffers = &resultBuffer};
3811
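    // Return every depth buffer still queued in the channel with an error status so the
    // framework does not keep waiting on them (e.g. across a flush or stream reconfiguration).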
3812 while (nullptr !=
3813 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3814 depthCh->unmapBuffer(currentFrameNumber);
3815
3816 notify_msg.message.error.frame_number = currentFrameNumber;
3817 orchestrateNotify(&notify_msg);
3818
3819 resultBuffer.buffer = depthBuffer;
3820 result.frame_number = currentFrameNumber;
3821 orchestrateResult(&result);
3822 };
3823}
3824
3825/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003826 * FUNCTION : hdrPlusPerfLock
3827 *
3828 * DESCRIPTION: perf lock for HDR+ using custom intent
3829 *
3830 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3831 *
3832 * RETURN : None
3833 *
3834 *==========================================================================*/
3835void QCamera3HardwareInterface::hdrPlusPerfLock(
3836 mm_camera_super_buf_t *metadata_buf)
3837{
3838 if (NULL == metadata_buf) {
3839 LOGE("metadata_buf is NULL");
3840 return;
3841 }
3842 metadata_buffer_t *metadata =
3843 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3844 int32_t *p_frame_number_valid =
3845 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3846 uint32_t *p_frame_number =
3847 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3848
3849 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3850 LOGE("%s: Invalid metadata", __func__);
3851 return;
3852 }
3853
3854 //acquire perf lock for 5 sec after the last HDR frame is captured
3855 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3856 if ((p_frame_number != NULL) &&
3857 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003858 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003859 }
3860 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003861}
3862
3863/*===========================================================================
3864 * FUNCTION : handleInputBufferWithLock
3865 *
3866 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3867 *
3868 * PARAMETERS : @frame_number: frame number of the input buffer
3869 *
3870 * RETURN :
3871 *
3872 *==========================================================================*/
3873void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3874{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003876 pendingRequestIterator i = mPendingRequestsList.begin();
3877 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3878 i++;
3879 }
3880 if (i != mPendingRequestsList.end() && i->input_buffer) {
3881 //found the right request
3882 if (!i->shutter_notified) {
3883 CameraMetadata settings;
3884 camera3_notify_msg_t notify_msg;
3885 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3886 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3887 if(i->settings) {
3888 settings = i->settings;
3889 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3890 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3891 } else {
3892 LOGE("No timestamp in input settings! Using current one.");
3893 }
3894 } else {
3895 LOGE("Input settings missing!");
3896 }
3897
3898 notify_msg.type = CAMERA3_MSG_SHUTTER;
3899 notify_msg.message.shutter.frame_number = frame_number;
3900 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003901 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 i->shutter_notified = true;
3903 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3904 i->frame_number, notify_msg.message.shutter.timestamp);
3905 }
3906
3907 if (i->input_buffer->release_fence != -1) {
3908 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3909 close(i->input_buffer->release_fence);
3910 if (rc != OK) {
3911 LOGE("input buffer sync wait failed %d", rc);
3912 }
3913 }
3914
3915 camera3_capture_result result;
3916 memset(&result, 0, sizeof(camera3_capture_result));
3917 result.frame_number = frame_number;
3918 result.result = i->settings;
3919 result.input_buffer = i->input_buffer;
3920 result.partial_result = PARTIAL_RESULT_COUNT;
3921
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003922 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003923 LOGD("Input request metadata and input buffer frame_number = %u",
3924 i->frame_number);
3925 i = erasePendingRequest(i);
3926 } else {
3927 LOGE("Could not find input request for frame number %d", frame_number);
3928 }
3929}
3930
3931/*===========================================================================
3932 * FUNCTION : handleBufferWithLock
3933 *
3934 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3935 *
3936 * PARAMETERS : @buffer: image buffer for the callback
3937 * @frame_number: frame number of the image buffer
3938 *
3939 * RETURN :
3940 *
3941 *==========================================================================*/
3942void QCamera3HardwareInterface::handleBufferWithLock(
3943 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3944{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003945 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003946
3947 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3948 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3949 }
3950
Thierry Strudel3d639192016-09-09 11:52:26 -07003951 /* Nothing to be done during error state */
3952 if ((ERROR == mState) || (DEINIT == mState)) {
3953 return;
3954 }
3955 if (mFlushPerf) {
3956 handleBuffersDuringFlushLock(buffer);
3957 return;
3958 }
3959 //not in flush
3960 // If the frame number doesn't exist in the pending request list,
3961 // directly send the buffer to the frameworks, and update pending buffers map
3962 // Otherwise, book-keep the buffer.
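    // (A buffer can arrive after its pending-request entry has already been erased, typically
    // because the result metadata was sent first; such buffers are returned immediately below.)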
3963 pendingRequestIterator i = mPendingRequestsList.begin();
3964 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3965 i++;
3966 }
3967 if (i == mPendingRequestsList.end()) {
3968        // Verify all pending requests' frame_numbers are greater
3969 for (pendingRequestIterator j = mPendingRequestsList.begin();
3970 j != mPendingRequestsList.end(); j++) {
3971 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3972 LOGW("Error: pending live frame number %d is smaller than %d",
3973 j->frame_number, frame_number);
3974 }
3975 }
3976 camera3_capture_result_t result;
3977 memset(&result, 0, sizeof(camera3_capture_result_t));
3978 result.result = NULL;
3979 result.frame_number = frame_number;
3980 result.num_output_buffers = 1;
3981 result.partial_result = 0;
3982 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3983 m != mPendingFrameDropList.end(); m++) {
3984 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3985 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3986 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3987 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3988 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3989 frame_number, streamID);
3990 m = mPendingFrameDropList.erase(m);
3991 break;
3992 }
3993 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003994 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003995 result.output_buffers = buffer;
3996 LOGH("result frame_number = %d, buffer = %p",
3997 frame_number, buffer->buffer);
3998
3999 mPendingBuffersMap.removeBuf(buffer->buffer);
4000
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004001 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004002 } else {
4003 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004004 if (i->input_buffer->release_fence != -1) {
4005 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4006 close(i->input_buffer->release_fence);
4007 if (rc != OK) {
4008 LOGE("input buffer sync wait failed %d", rc);
4009 }
4010 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004012
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004013 // Put buffer into the pending request
4014 for (auto &requestedBuffer : i->buffers) {
4015 if (requestedBuffer.stream == buffer->stream) {
4016 if (requestedBuffer.buffer != nullptr) {
4017 LOGE("Error: buffer is already set");
4018 } else {
4019 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4020 sizeof(camera3_stream_buffer_t));
4021 *(requestedBuffer.buffer) = *buffer;
4022 LOGH("cache buffer %p at result frame_number %u",
4023 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004024 }
4025 }
4026 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004027
4028 if (i->input_buffer) {
4029 // For a reprocessing request, try to send out shutter callback and result metadata.
4030 handlePendingResultsWithLock(frame_number, nullptr);
4031 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004033
4034 if (mPreviewStarted == false) {
4035 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4036 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004037 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4038
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004039 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4040 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4041 mPreviewStarted = true;
4042
4043 // Set power hint for preview
4044 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4045 }
4046 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004047}
4048
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004049void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4050 const camera_metadata_t *resultMetadata)
4051{
4052 // Find the pending request for this result metadata.
4053 auto requestIter = mPendingRequestsList.begin();
4054 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4055 requestIter++;
4056 }
4057
4058 if (requestIter == mPendingRequestsList.end()) {
4059 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4060 return;
4061 }
4062
4063 // Update the result metadata
4064 requestIter->resultMetadata = resultMetadata;
4065
4066 // Check what type of request this is.
4067 bool liveRequest = false;
4068 if (requestIter->hdrplus) {
4069 // HDR+ request doesn't have partial results.
4070 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4071 } else if (requestIter->input_buffer != nullptr) {
4072 // Reprocessing request result is the same as settings.
4073 requestIter->resultMetadata = requestIter->settings;
4074 // Reprocessing request doesn't have partial results.
4075 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4076 } else {
4077 liveRequest = true;
4078 requestIter->partial_result_cnt++;
4079 mPendingLiveRequest--;
4080
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004081 {
4082 Mutex::Autolock l(gHdrPlusClientLock);
4083 // For a live request, send the metadata to HDR+ client.
4084 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4085 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4086 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4087 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004088 }
4089 }
4090
4091 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4092 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
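    // Illustrative sketch (not from the original source): if frames 10, 11 and 12 are pending
    // and only 11's metadata has arrived, nothing is sent yet; once 10's metadata arrives,
    // 10 and 11 are sent in order and 12 keeps waiting. The readyToSend flag below enforces
    // exactly this ordering.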
4093 bool readyToSend = true;
4094
4095 // Iterate through the pending requests to send out shutter callbacks and results that are
4096 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4097 // live requests that don't have result metadata yet.
4098 auto iter = mPendingRequestsList.begin();
4099 while (iter != mPendingRequestsList.end()) {
4100 // Check if current pending request is ready. If it's not ready, the following pending
4101 // requests are also not ready.
4102 if (readyToSend && iter->resultMetadata == nullptr) {
4103 readyToSend = false;
4104 }
4105
4106 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4107
4108 std::vector<camera3_stream_buffer_t> outputBuffers;
4109
4110 camera3_capture_result_t result = {};
4111 result.frame_number = iter->frame_number;
4112 result.result = iter->resultMetadata;
4113 result.partial_result = iter->partial_result_cnt;
4114
4115 // If this pending buffer has result metadata, we may be able to send out shutter callback
4116 // and result metadata.
4117 if (iter->resultMetadata != nullptr) {
4118 if (!readyToSend) {
4119 // If any of the previous pending request is not ready, this pending request is
4120 // also not ready to send in order to keep shutter callbacks and result metadata
4121 // in order.
4122 iter++;
4123 continue;
4124 }
4125
4126 // Invoke shutter callback if not yet.
4127 if (!iter->shutter_notified) {
4128 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4129
4130 // Find the timestamp in HDR+ result metadata
4131 camera_metadata_ro_entry_t entry;
4132 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4133 ANDROID_SENSOR_TIMESTAMP, &entry);
4134 if (res != OK) {
4135 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4136 __FUNCTION__, iter->frame_number, strerror(-res), res);
4137 } else {
4138 timestamp = entry.data.i64[0];
4139 }
4140
4141 camera3_notify_msg_t notify_msg = {};
4142 notify_msg.type = CAMERA3_MSG_SHUTTER;
4143 notify_msg.message.shutter.frame_number = iter->frame_number;
4144 notify_msg.message.shutter.timestamp = timestamp;
4145 orchestrateNotify(&notify_msg);
4146 iter->shutter_notified = true;
4147 }
4148
4149 result.input_buffer = iter->input_buffer;
4150
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004151 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4152 // If the result metadata belongs to a live request, notify errors for previous pending
4153 // live requests.
4154 mPendingLiveRequest--;
4155
4156 CameraMetadata dummyMetadata;
4157 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4158 result.result = dummyMetadata.release();
4159
4160 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004161
4162            // partial_result should be PARTIAL_RESULT_COUNT in case of
4163 // ERROR_RESULT.
4164 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4165 result.partial_result = PARTIAL_RESULT_COUNT;
4166
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 } else {
4168 iter++;
4169 continue;
4170 }
4171
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004172 // Prepare output buffer array
4173 for (auto bufferInfoIter = iter->buffers.begin();
4174 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4175 if (bufferInfoIter->buffer != nullptr) {
4176
4177 QCamera3Channel *channel =
4178 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4179 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4180
4181 // Check if this buffer is a dropped frame.
4182 auto frameDropIter = mPendingFrameDropList.begin();
4183 while (frameDropIter != mPendingFrameDropList.end()) {
4184 if((frameDropIter->stream_ID == streamID) &&
4185 (frameDropIter->frame_number == frameNumber)) {
4186 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4187 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4188 streamID);
4189 mPendingFrameDropList.erase(frameDropIter);
4190 break;
4191 } else {
4192 frameDropIter++;
4193 }
4194 }
4195
4196 // Check buffer error status
4197 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4198 bufferInfoIter->buffer->buffer);
4199 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4200
4201 outputBuffers.push_back(*(bufferInfoIter->buffer));
4202 free(bufferInfoIter->buffer);
4203 bufferInfoIter->buffer = NULL;
4204 }
4205 }
4206
4207 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4208 result.num_output_buffers = outputBuffers.size();
4209
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004210 orchestrateResult(&result);
4211
4212 // For reprocessing, result metadata is the same as settings so do not free it here to
4213 // avoid double free.
4214 if (result.result != iter->settings) {
4215 free_camera_metadata((camera_metadata_t *)result.result);
4216 }
4217 iter->resultMetadata = nullptr;
4218 iter = erasePendingRequest(iter);
4219 }
4220
4221 if (liveRequest) {
4222 for (auto &iter : mPendingRequestsList) {
4223 // Increment pipeline depth for the following pending requests.
4224 if (iter.frame_number > frameNumber) {
4225 iter.pipeline_depth++;
4226 }
4227 }
4228 }
4229
4230 unblockRequestIfNecessary();
4231}
4232
Thierry Strudel3d639192016-09-09 11:52:26 -07004233/*===========================================================================
4234 * FUNCTION : unblockRequestIfNecessary
4235 *
4236 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4237 * that mMutex is held when this function is called.
4238 *
4239 * PARAMETERS :
4240 *
4241 * RETURN :
4242 *
4243 *==========================================================================*/
4244void QCamera3HardwareInterface::unblockRequestIfNecessary()
4245{
4246 // Unblock process_capture_request
4247 pthread_cond_signal(&mRequestCond);
4248}
4249
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004250/*===========================================================================
4251 * FUNCTION : isHdrSnapshotRequest
4252 *
4253 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4254 *
4255 * PARAMETERS : camera3 request structure
4256 *
4257 * RETURN : boolean decision variable
4258 *
4259 *==========================================================================*/
4260bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4261{
4262 if (request == NULL) {
4263 LOGE("Invalid request handle");
4264 assert(0);
4265 return false;
4266 }
4267
4268 if (!mForceHdrSnapshot) {
4269 CameraMetadata frame_settings;
4270 frame_settings = request->settings;
4271
4272 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4273 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4274 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4275 return false;
4276 }
4277 } else {
4278 return false;
4279 }
4280
4281 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4282 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4283 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4284 return false;
4285 }
4286 } else {
4287 return false;
4288 }
4289 }
4290
4291 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4292 if (request->output_buffers[i].stream->format
4293 == HAL_PIXEL_FORMAT_BLOB) {
4294 return true;
4295 }
4296 }
4297
4298 return false;
4299}
4300/*===========================================================================
4301 * FUNCTION : orchestrateRequest
4302 *
4303 * DESCRIPTION: Orchestrates a capture request from camera service
4304 *
4305 * PARAMETERS :
4306 * @request : request from framework to process
4307 *
4308 * RETURN : Error status codes
4309 *
4310 *==========================================================================*/
4311int32_t QCamera3HardwareInterface::orchestrateRequest(
4312 camera3_capture_request_t *request)
4313{
4314
4315 uint32_t originalFrameNumber = request->frame_number;
4316 uint32_t originalOutputCount = request->num_output_buffers;
4317 const camera_metadata_t *original_settings = request->settings;
4318 List<InternalRequest> internallyRequestedStreams;
4319 List<InternalRequest> emptyInternalList;
4320
4321 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4322 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4323 uint32_t internalFrameNumber;
4324 CameraMetadata modified_meta;
4325
4326
4327 /* Add Blob channel to list of internally requested streams */
4328 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4329 if (request->output_buffers[i].stream->format
4330 == HAL_PIXEL_FORMAT_BLOB) {
4331 InternalRequest streamRequested;
4332 streamRequested.meteringOnly = 1;
4333 streamRequested.need_metadata = 0;
4334 streamRequested.stream = request->output_buffers[i].stream;
4335 internallyRequestedStreams.push_back(streamRequested);
4336 }
4337 }
4338 request->num_output_buffers = 0;
4339 auto itr = internallyRequestedStreams.begin();
4340
4341 /* Modify setting to set compensation */
4342 modified_meta = request->settings;
4343 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4344 uint8_t aeLock = 1;
4345 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4346 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4347 camera_metadata_t *modified_settings = modified_meta.release();
4348 request->settings = modified_settings;
4349
4350 /* Capture Settling & -2x frame */
4351 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4352 request->frame_number = internalFrameNumber;
4353 processCaptureRequest(request, internallyRequestedStreams);
4354
4355 request->num_output_buffers = originalOutputCount;
4356 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4357 request->frame_number = internalFrameNumber;
4358 processCaptureRequest(request, emptyInternalList);
4359 request->num_output_buffers = 0;
4360
4361 modified_meta = modified_settings;
4362 expCompensation = 0;
4363 aeLock = 1;
4364 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4365 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4366 modified_settings = modified_meta.release();
4367 request->settings = modified_settings;
4368
4369 /* Capture Settling & 0X frame */
4370
4371 itr = internallyRequestedStreams.begin();
4372 if (itr == internallyRequestedStreams.end()) {
4373 LOGE("Error Internally Requested Stream list is empty");
4374 assert(0);
4375 } else {
4376 itr->need_metadata = 0;
4377 itr->meteringOnly = 1;
4378 }
4379
4380 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4381 request->frame_number = internalFrameNumber;
4382 processCaptureRequest(request, internallyRequestedStreams);
4383
4384 itr = internallyRequestedStreams.begin();
4385 if (itr == internallyRequestedStreams.end()) {
4386 ALOGE("Error Internally Requested Stream list is empty");
4387 assert(0);
4388 } else {
4389 itr->need_metadata = 1;
4390 itr->meteringOnly = 0;
4391 }
4392
4393 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4394 request->frame_number = internalFrameNumber;
4395 processCaptureRequest(request, internallyRequestedStreams);
4396
4397 /* Capture 2X frame*/
4398 modified_meta = modified_settings;
4399 expCompensation = GB_HDR_2X_STEP_EV;
4400 aeLock = 1;
4401 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4402 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4403 modified_settings = modified_meta.release();
4404 request->settings = modified_settings;
4405
4406 itr = internallyRequestedStreams.begin();
4407 if (itr == internallyRequestedStreams.end()) {
4408 ALOGE("Error Internally Requested Stream list is empty");
4409 assert(0);
4410 } else {
4411 itr->need_metadata = 0;
4412 itr->meteringOnly = 1;
4413 }
4414 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4415 request->frame_number = internalFrameNumber;
4416 processCaptureRequest(request, internallyRequestedStreams);
4417
4418 itr = internallyRequestedStreams.begin();
4419 if (itr == internallyRequestedStreams.end()) {
4420 ALOGE("Error Internally Requested Stream list is empty");
4421 assert(0);
4422 } else {
4423 itr->need_metadata = 1;
4424 itr->meteringOnly = 0;
4425 }
4426
4427 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4428 request->frame_number = internalFrameNumber;
4429 processCaptureRequest(request, internallyRequestedStreams);
4430
4431
4432 /* Capture 2X on original streaming config*/
4433 internallyRequestedStreams.clear();
4434
4435 /* Restore original settings pointer */
4436 request->settings = original_settings;
4437 } else {
4438 uint32_t internalFrameNumber;
4439 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4440 request->frame_number = internalFrameNumber;
4441 return processCaptureRequest(request, internallyRequestedStreams);
4442 }
4443
4444 return NO_ERROR;
4445}
4446
4447/*===========================================================================
4448 * FUNCTION : orchestrateResult
4449 *
4450 * DESCRIPTION: Orchestrates a capture result to camera service
4451 *
4452 * PARAMETERS :
4453 *   @result : capture result to be sent to camera service
4454 *
4455 * RETURN :
4456 *
4457 *==========================================================================*/
4458void QCamera3HardwareInterface::orchestrateResult(
4459 camera3_capture_result_t *result)
4460{
4461 uint32_t frameworkFrameNumber;
4462 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4463 frameworkFrameNumber);
4464 if (rc != NO_ERROR) {
4465 LOGE("Cannot find translated frameworkFrameNumber");
4466 assert(0);
4467 } else {
4468 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004469 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004470 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004471 if (result->result != NULL) {
4472 CameraMetadata metadata;
4473 metadata.acquire((camera_metadata_t *)result->result);
4474 if (metadata.exists(ANDROID_SYNC_FRAME_NUMBER)) {
4475 int64_t sync_frame_number = frameworkFrameNumber;
4476 metadata.update(ANDROID_SYNC_FRAME_NUMBER, &sync_frame_number, 1);
4477 }
4478 result->result = metadata.release();
4479 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004480 result->frame_number = frameworkFrameNumber;
4481 mCallbackOps->process_capture_result(mCallbackOps, result);
4482 }
4483 }
4484}
4485
4486/*===========================================================================
4487 * FUNCTION : orchestrateNotify
4488 *
4489 * DESCRIPTION: Orchestrates a notify to camera service
4490 *
4491 * PARAMETERS :
4492 *   @notify_msg : notify message to be sent to camera service
4493 *
4494 * RETURN :
4495 *
4496 *==========================================================================*/
4497void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4498{
4499 uint32_t frameworkFrameNumber;
4500 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004501 int32_t rc = NO_ERROR;
4502
4503 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004504 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004505
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004506 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004507 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4508 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4509 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004510 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004511 LOGE("Cannot find translated frameworkFrameNumber");
4512 assert(0);
4513 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004514 }
4515 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004516
4517 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4518 LOGD("Internal Request drop the notifyCb");
4519 } else {
4520 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4521 mCallbackOps->notify(mCallbackOps, notify_msg);
4522 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004523}
4524
4525/*===========================================================================
4526 * FUNCTION : FrameNumberRegistry
4527 *
4528 * DESCRIPTION: Constructor
4529 *
4530 * PARAMETERS :
4531 *
4532 * RETURN :
4533 *
4534 *==========================================================================*/
4535FrameNumberRegistry::FrameNumberRegistry()
4536{
4537 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4538}
4539
4540/*===========================================================================
4541 * FUNCTION : ~FrameNumberRegistry
4542 *
4543 * DESCRIPTION: Destructor
4544 *
4545 * PARAMETERS :
4546 *
4547 * RETURN :
4548 *
4549 *==========================================================================*/
4550FrameNumberRegistry::~FrameNumberRegistry()
4551{
4552}
4553
4554/*===========================================================================
4555 * FUNCTION : PurgeOldEntriesLocked
4556 *
4557 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4558 *
4559 * PARAMETERS :
4560 *
4561 * RETURN : NONE
4562 *
4563 *==========================================================================*/
4564void FrameNumberRegistry::purgeOldEntriesLocked()
4565{
4566 while (_register.begin() != _register.end()) {
4567 auto itr = _register.begin();
4568 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4569 _register.erase(itr);
4570 } else {
4571 return;
4572 }
4573 }
4574}
4575
4576/*===========================================================================
4577 * FUNCTION : allocStoreInternalFrameNumber
4578 *
4579 * DESCRIPTION: Method to note down a framework request and associate a new
4580 * internal request number against it
4581 *
4582 * PARAMETERS :
4583 * @fFrameNumber: Identifier given by framework
4584 * @internalFN : Output parameter which will have the newly generated internal
4585 * entry
4586 *
4587 * RETURN : Error code
4588 *
4589 *==========================================================================*/
4590int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4591 uint32_t &internalFrameNumber)
4592{
4593 Mutex::Autolock lock(mRegistryLock);
4594 internalFrameNumber = _nextFreeInternalNumber++;
4595 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4596 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4597 purgeOldEntriesLocked();
4598 return NO_ERROR;
4599}
4600
4601/*===========================================================================
4602 * FUNCTION : generateStoreInternalFrameNumber
4603 *
4604 * DESCRIPTION: Method to associate a new internal request number independent
4605 *              of any association with framework requests
4606 *
4607 * PARAMETERS :
4608 *   @internalFrame#: Output parameter which will have the newly generated internal
4609 *                    frame number
4610 *
4611 * RETURN : Error code
4612 *
4613 *==========================================================================*/
4614int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4615{
4616 Mutex::Autolock lock(mRegistryLock);
4617 internalFrameNumber = _nextFreeInternalNumber++;
4618 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4619 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4620 purgeOldEntriesLocked();
4621 return NO_ERROR;
4622}
4623
4624/*===========================================================================
4625 * FUNCTION : getFrameworkFrameNumber
4626 *
4627 * DESCRIPTION: Method to query the framework framenumber given an internal #
4628 *
4629 * PARAMETERS :
4630 * @internalFrame#: Internal reference
4631 * @frameworkframenumber: Output parameter holding framework frame entry
4632 *
4633 * RETURN : Error code
4634 *
4635 *==========================================================================*/
4636int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4637 uint32_t &frameworkFrameNumber)
4638{
4639 Mutex::Autolock lock(mRegistryLock);
4640 auto itr = _register.find(internalFrameNumber);
4641 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004642 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 return -ENOENT;
4644 }
4645
4646 frameworkFrameNumber = itr->second;
4647 purgeOldEntriesLocked();
4648 return NO_ERROR;
4649}
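/* Usage sketch for FrameNumberRegistry (illustrative only, the numbers are made up):
 *
 *   uint32_t internalFN;
 *   // Framework frame 100 gets an internal number, e.g. 1000:
 *   _orchestrationDb.allocStoreInternalFrameNumber(100, internalFN);
 *   // Extra HDR bracketing frames get internal numbers with no framework mapping:
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFN);
 *   // When a result/notify comes back, translate before calling into the framework:
 *   uint32_t frameworkFN;
 *   if (_orchestrationDb.getFrameworkFrameNumber(internalFN, frameworkFN) == NO_ERROR &&
 *           frameworkFN != EMPTY_FRAMEWORK_FRAME_NUMBER) {
 *       // deliver to mCallbackOps using frameworkFN
 *   } // otherwise the result belongs to an internal-only request and is dropped
 */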
Thierry Strudel3d639192016-09-09 11:52:26 -07004650
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004651status_t QCamera3HardwareInterface::fillPbStreamConfig(
4652 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4653 QCamera3Channel *channel, uint32_t streamIndex) {
4654 if (config == nullptr) {
4655 LOGE("%s: config is null", __FUNCTION__);
4656 return BAD_VALUE;
4657 }
4658
4659 if (channel == nullptr) {
4660 LOGE("%s: channel is null", __FUNCTION__);
4661 return BAD_VALUE;
4662 }
4663
4664 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4665 if (stream == nullptr) {
4666 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4667 return NAME_NOT_FOUND;
4668 }
4669
4670 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4671 if (streamInfo == nullptr) {
4672 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4673 return NAME_NOT_FOUND;
4674 }
4675
4676 config->id = pbStreamId;
4677 config->image.width = streamInfo->dim.width;
4678 config->image.height = streamInfo->dim.height;
4679 config->image.padding = 0;
4680 config->image.format = pbStreamFormat;
4681
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004682 uint32_t totalPlaneSize = 0;
4683
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004684 // Fill plane information.
4685 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4686 pbcamera::PlaneConfiguration plane;
4687 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4688 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4689 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004690
4691 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004692 }
4693
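    // Worked example (illustrative numbers, not from the original): for a 1920x1080 NV12
    // stream with stride 1920 and scanlines 1088 and 544, totalPlaneSize is
    // 1920*1088 + 1920*544 = 3133440; whatever frame_len exceeds that is reported as padding.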
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004694 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004695 return OK;
4696}
4697
Thierry Strudel3d639192016-09-09 11:52:26 -07004698/*===========================================================================
4699 * FUNCTION : processCaptureRequest
4700 *
4701 * DESCRIPTION: process a capture request from camera service
4702 *
4703 * PARAMETERS :
4704 * @request : request from framework to process
4705 *
4706 * RETURN :
4707 *
4708 *==========================================================================*/
4709int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004710 camera3_capture_request_t *request,
4711 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004712{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004713 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004714 int rc = NO_ERROR;
4715 int32_t request_id;
4716 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004717 bool isVidBufRequested = false;
4718 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004719 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004720
4721 pthread_mutex_lock(&mMutex);
4722
4723 // Validate current state
4724 switch (mState) {
4725 case CONFIGURED:
4726 case STARTED:
4727 /* valid state */
4728 break;
4729
4730 case ERROR:
4731 pthread_mutex_unlock(&mMutex);
4732 handleCameraDeviceError();
4733 return -ENODEV;
4734
4735 default:
4736 LOGE("Invalid state %d", mState);
4737 pthread_mutex_unlock(&mMutex);
4738 return -ENODEV;
4739 }
4740
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004741 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004742 if (rc != NO_ERROR) {
4743 LOGE("incoming request is not valid");
4744 pthread_mutex_unlock(&mMutex);
4745 return rc;
4746 }
4747
4748 meta = request->settings;
4749
4750 // For first capture request, send capture intent, and
4751 // stream on all streams
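    // Rough first-request sequence inside the CONFIGURED block below (summarizing the code,
    // not adding behavior): push session parameters (capture intent, HAL version, EIS/IS
    // type, tintless, CDS, stream info) to the backend, query the selected sensor mode,
    // initialize every channel, set the bundle/dual-cam link info, and finally start the
    // metadata, analysis, support and processing channels. Any failure releases the preview
    // perf lock through error_exit.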
4752 if (mState == CONFIGURED) {
4753 // send an unconfigure to the backend so that the isp
4754 // resources are deallocated
4755 if (!mFirstConfiguration) {
4756 cam_stream_size_info_t stream_config_info;
4757 int32_t hal_version = CAM_HAL_V3;
4758 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4759 stream_config_info.buffer_info.min_buffers =
4760 MIN_INFLIGHT_REQUESTS;
4761 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004762 m_bIs4KVideo ? 0 :
4763 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004764 clear_metadata_buffer(mParameters);
4765 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4766 CAM_INTF_PARM_HAL_VERSION, hal_version);
4767 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4768 CAM_INTF_META_STREAM_INFO, stream_config_info);
4769 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4770 mParameters);
4771 if (rc < 0) {
4772 LOGE("set_parms for unconfigure failed");
4773 pthread_mutex_unlock(&mMutex);
4774 return rc;
4775 }
4776 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004777 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004778 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004779 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004780 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004781 property_get("persist.camera.is_type", is_type_value, "4");
4782 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4783 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4784 property_get("persist.camera.is_type_preview", is_type_value, "4");
4785 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4786 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004787
4788 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4789 int32_t hal_version = CAM_HAL_V3;
4790 uint8_t captureIntent =
4791 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4792 mCaptureIntent = captureIntent;
4793 clear_metadata_buffer(mParameters);
4794 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4795 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4796 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004797 if (mFirstConfiguration) {
4798 // configure instant AEC
4799 // Instant AEC is a session based parameter and it is needed only
4800 // once per complete session after open camera.
4801 // i.e. This is set only once for the first capture request, after open camera.
4802 setInstantAEC(meta);
4803 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004804 uint8_t fwkVideoStabMode=0;
4805 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4806 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4807 }
4808
Xue Tuecac74e2017-04-17 13:58:15 -07004809        // Turn on EIS only for video/preview when the EIS setprop is enabled
4810 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004811 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004812 int32_t vsMode;
4813 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4814 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4815 rc = BAD_VALUE;
4816 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004817 LOGD("setEis %d", setEis);
4818 bool eis3Supported = false;
4819 size_t count = IS_TYPE_MAX;
4820 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4821 for (size_t i = 0; i < count; i++) {
4822 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4823 eis3Supported = true;
4824 break;
4825 }
4826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004827
4828 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004829 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004830 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4831 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004832 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4833 is_type = isTypePreview;
4834 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4835 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4836 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004837 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004838 } else {
4839 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004840 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004841 } else {
4842 is_type = IS_TYPE_NONE;
4843 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4847 }
4848 }
4849
4850 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4851 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4852
Thierry Strudel54dc9782017-02-15 12:12:10 -08004853 //Disable tintless only if the property is set to 0
4854 memset(prop, 0, sizeof(prop));
4855 property_get("persist.camera.tintless.enable", prop, "1");
4856 int32_t tintless_value = atoi(prop);
4857
Thierry Strudel3d639192016-09-09 11:52:26 -07004858 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4859 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004860
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 //Disable CDS for HFR mode or if DIS/EIS is on.
4862 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4863 //after every configure_stream
4864 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4865 (m_bIsVideo)) {
4866 int32_t cds = CAM_CDS_MODE_OFF;
4867 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4868 CAM_INTF_PARM_CDS_MODE, cds))
4869 LOGE("Failed to disable CDS for HFR mode");
4870
4871 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004872
4873 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4874 uint8_t* use_av_timer = NULL;
4875
4876 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004877 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004878 use_av_timer = &m_debug_avtimer;
4879 }
4880 else{
4881 use_av_timer =
4882 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004883 if (use_av_timer) {
4884 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4885 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004886 }
4887
4888 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4889 rc = BAD_VALUE;
4890 }
4891 }
4892
Thierry Strudel3d639192016-09-09 11:52:26 -07004893 setMobicat();
4894
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004895 uint8_t nrMode = 0;
4896 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4897 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4898 }
4899
Thierry Strudel3d639192016-09-09 11:52:26 -07004900 /* Set fps and hfr mode while sending meta stream info so that sensor
4901 * can configure appropriate streaming mode */
4902 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004903 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4904 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004905 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4906 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 if (rc == NO_ERROR) {
4908 int32_t max_fps =
4909 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004910 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4912 }
4913 /* For HFR, more buffers are dequeued upfront to improve the performance */
4914 if (mBatchSize) {
4915 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4916 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4917 }
4918 }
4919 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 LOGE("setHalFpsRange failed");
4921 }
4922 }
4923 if (meta.exists(ANDROID_CONTROL_MODE)) {
4924 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4925 rc = extractSceneMode(meta, metaMode, mParameters);
4926 if (rc != NO_ERROR) {
4927 LOGE("extractSceneMode failed");
4928 }
4929 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004930 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004931
Thierry Strudel04e026f2016-10-10 11:27:36 -07004932 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4933 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4934 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4935 rc = setVideoHdrMode(mParameters, vhdr);
4936 if (rc != NO_ERROR) {
4937                LOGE("setVideoHdrMode failed");
4938 }
4939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004941 if (meta.exists(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV)) {
4942 uint8_t sensorModeFullFov =
4943 meta.find(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV).data.u8[0];
4944 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4945 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4946 sensorModeFullFov)) {
4947 rc = BAD_VALUE;
4948 }
4949 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004950 //TODO: validate the arguments, HSV scenemode should have only the
4951 //advertised fps ranges
4952
4953 /*set the capture intent, hal version, tintless, stream info,
4954         *and DIS enable parameters to the backend*/
4955 LOGD("set_parms META_STREAM_INFO " );
4956 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004957 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4958 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 mStreamConfigInfo.type[i],
4960 mStreamConfigInfo.stream_sizes[i].width,
4961 mStreamConfigInfo.stream_sizes[i].height,
4962 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004963 mStreamConfigInfo.format[i],
4964 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004965 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966
Thierry Strudel3d639192016-09-09 11:52:26 -07004967 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4968 mParameters);
4969 if (rc < 0) {
4970 LOGE("set_parms failed for hal version, stream info");
4971 }
4972
Chien-Yu Chenee335912017-02-09 17:53:20 -08004973 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4974 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004975 if (rc != NO_ERROR) {
4976 LOGE("Failed to get sensor output size");
4977 pthread_mutex_unlock(&mMutex);
4978 goto error_exit;
4979 }
4980
4981 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4982 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004983 mSensorModeInfo.active_array_size.width,
4984 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004985
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004986 {
4987 Mutex::Autolock l(gHdrPlusClientLock);
4988 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004989 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004990 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4991 if (rc != OK) {
4992 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4993 mCameraId, mSensorModeInfo.op_pixel_clk);
4994 pthread_mutex_unlock(&mMutex);
4995 goto error_exit;
4996 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004997 }
4998 }
4999
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 /* Set batchmode before initializing channel. Since registerBuffer
5001 * internally initializes some of the channels, better set batchmode
5002 * even before first register buffer */
5003 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5004 it != mStreamInfo.end(); it++) {
5005 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5006 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5007 && mBatchSize) {
5008 rc = channel->setBatchSize(mBatchSize);
5009 //Disable per frame map unmap for HFR/batchmode case
5010 rc |= channel->setPerFrameMapUnmap(false);
5011 if (NO_ERROR != rc) {
5012 LOGE("Channel init failed %d", rc);
5013 pthread_mutex_unlock(&mMutex);
5014 goto error_exit;
5015 }
5016 }
5017 }
5018
5019 //First initialize all streams
5020 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5021 it != mStreamInfo.end(); it++) {
5022 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005023
5024 /* Initial value of NR mode is needed before stream on */
5025 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005026 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5027 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005028 setEis) {
5029 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5030 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5031 is_type = mStreamConfigInfo.is_type[i];
5032 break;
5033 }
5034 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005036 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005037 rc = channel->initialize(IS_TYPE_NONE);
5038 }
5039 if (NO_ERROR != rc) {
5040 LOGE("Channel initialization failed %d", rc);
5041 pthread_mutex_unlock(&mMutex);
5042 goto error_exit;
5043 }
5044 }
5045
5046 if (mRawDumpChannel) {
5047 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5048 if (rc != NO_ERROR) {
5049 LOGE("Error: Raw Dump Channel init failed");
5050 pthread_mutex_unlock(&mMutex);
5051 goto error_exit;
5052 }
5053 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005054 if (mHdrPlusRawSrcChannel) {
5055 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5056 if (rc != NO_ERROR) {
5057 LOGE("Error: HDR+ RAW Source Channel init failed");
5058 pthread_mutex_unlock(&mMutex);
5059 goto error_exit;
5060 }
5061 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005062 if (mSupportChannel) {
5063 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5064 if (rc < 0) {
5065 LOGE("Support channel initialization failed");
5066 pthread_mutex_unlock(&mMutex);
5067 goto error_exit;
5068 }
5069 }
5070 if (mAnalysisChannel) {
5071 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5072 if (rc < 0) {
5073 LOGE("Analysis channel initialization failed");
5074 pthread_mutex_unlock(&mMutex);
5075 goto error_exit;
5076 }
5077 }
5078 if (mDummyBatchChannel) {
5079 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5080 if (rc < 0) {
5081 LOGE("mDummyBatchChannel setBatchSize failed");
5082 pthread_mutex_unlock(&mMutex);
5083 goto error_exit;
5084 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005085 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005086 if (rc < 0) {
5087 LOGE("mDummyBatchChannel initialization failed");
5088 pthread_mutex_unlock(&mMutex);
5089 goto error_exit;
5090 }
5091 }
5092
5093 // Set bundle info
5094 rc = setBundleInfo();
5095 if (rc < 0) {
5096 LOGE("setBundleInfo failed %d", rc);
5097 pthread_mutex_unlock(&mMutex);
5098 goto error_exit;
5099 }
5100
5101 //update settings from app here
5102 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5103 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5104 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5105 }
5106 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5107 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5108 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5109 }
5110 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5111 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5112 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5113
5114 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5115 (mLinkedCameraId != mCameraId) ) {
5116 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5117 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005118 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005119 goto error_exit;
5120 }
5121 }
5122
5123 // add bundle related cameras
5124 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5125 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005126 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5127 &m_pDualCamCmdPtr->bundle_info;
5128 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 if (mIsDeviceLinked)
5130 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5131 else
5132 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5133
5134 pthread_mutex_lock(&gCamLock);
5135
5136 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5137 LOGE("Dualcam: Invalid Session Id ");
5138 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005139 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005140 goto error_exit;
5141 }
5142
5143 if (mIsMainCamera == 1) {
5144 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5145 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005146 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005147 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 // related session id should be session id of linked session
5149 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5150 } else {
5151 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5152 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005153 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005154 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5156 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005157 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005158 pthread_mutex_unlock(&gCamLock);
5159
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005160 rc = mCameraHandle->ops->set_dual_cam_cmd(
5161 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005162 if (rc < 0) {
5163 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005164 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 goto error_exit;
5166 }
5167 }
5168
5169 //Then start them.
5170 LOGH("Start META Channel");
5171 rc = mMetadataChannel->start();
5172 if (rc < 0) {
5173 LOGE("META channel start failed");
5174 pthread_mutex_unlock(&mMutex);
5175 goto error_exit;
5176 }
5177
5178 if (mAnalysisChannel) {
5179 rc = mAnalysisChannel->start();
5180 if (rc < 0) {
5181 LOGE("Analysis channel start failed");
5182 mMetadataChannel->stop();
5183 pthread_mutex_unlock(&mMutex);
5184 goto error_exit;
5185 }
5186 }
5187
5188 if (mSupportChannel) {
5189 rc = mSupportChannel->start();
5190 if (rc < 0) {
5191 LOGE("Support channel start failed");
5192 mMetadataChannel->stop();
5193 /* Although support and analysis are mutually exclusive today
5194               adding it in any case for future proofing */
5195 if (mAnalysisChannel) {
5196 mAnalysisChannel->stop();
5197 }
5198 pthread_mutex_unlock(&mMutex);
5199 goto error_exit;
5200 }
5201 }
5202 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5203 it != mStreamInfo.end(); it++) {
5204 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5205 LOGH("Start Processing Channel mask=%d",
5206 channel->getStreamTypeMask());
5207 rc = channel->start();
5208 if (rc < 0) {
5209 LOGE("channel start failed");
5210 pthread_mutex_unlock(&mMutex);
5211 goto error_exit;
5212 }
5213 }
5214
5215 if (mRawDumpChannel) {
5216 LOGD("Starting raw dump stream");
5217 rc = mRawDumpChannel->start();
5218 if (rc != NO_ERROR) {
5219 LOGE("Error Starting Raw Dump Channel");
5220 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5221 it != mStreamInfo.end(); it++) {
5222 QCamera3Channel *channel =
5223 (QCamera3Channel *)(*it)->stream->priv;
5224 LOGH("Stopping Processing Channel mask=%d",
5225 channel->getStreamTypeMask());
5226 channel->stop();
5227 }
5228 if (mSupportChannel)
5229 mSupportChannel->stop();
5230 if (mAnalysisChannel) {
5231 mAnalysisChannel->stop();
5232 }
5233 mMetadataChannel->stop();
5234 pthread_mutex_unlock(&mMutex);
5235 goto error_exit;
5236 }
5237 }
5238
5239 if (mChannelHandle) {
5240
5241 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5242 mChannelHandle);
5243 if (rc != NO_ERROR) {
5244 LOGE("start_channel failed %d", rc);
5245 pthread_mutex_unlock(&mMutex);
5246 goto error_exit;
5247 }
5248 }
5249
5250 goto no_error;
5251error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005252 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 return rc;
5254no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005255 mWokenUpByDaemon = false;
5256 mPendingLiveRequest = 0;
5257 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 }
5259
Chien-Yu Chenee335912017-02-09 17:53:20 -08005260 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005261 {
5262 Mutex::Autolock l(gHdrPlusClientLock);
5263 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5264 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5265 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5266 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5267 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5268 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005269 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005270 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005271 pthread_mutex_unlock(&mMutex);
5272 return rc;
5273 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005274
5275 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005276 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005277 }
5278
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005280 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005281
5282 if (mFlushPerf) {
5283 //we cannot accept any requests during flush
5284 LOGE("process_capture_request cannot proceed during flush");
5285 pthread_mutex_unlock(&mMutex);
5286 return NO_ERROR; //should return an error
5287 }
5288
5289 if (meta.exists(ANDROID_REQUEST_ID)) {
5290 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5291 mCurrentRequestId = request_id;
5292 LOGD("Received request with id: %d", request_id);
5293 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5294        LOGE("Unable to find request id field, "
5295                "& no previous id available");
5296 pthread_mutex_unlock(&mMutex);
5297 return NAME_NOT_FOUND;
5298 } else {
5299 LOGD("Re-using old request id");
5300 request_id = mCurrentRequestId;
5301 }
5302
5303 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5304 request->num_output_buffers,
5305 request->input_buffer,
5306 frameNumber);
5307 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005308 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005309 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005310 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005311 uint32_t snapshotStreamId = 0;
5312 for (size_t i = 0; i < request->num_output_buffers; i++) {
5313 const camera3_stream_buffer_t& output = request->output_buffers[i];
5314 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5315
Emilian Peev7650c122017-01-19 08:24:33 -08005316 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5317 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005318 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005319 blob_request = 1;
5320 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5321 }
5322
5323 if (output.acquire_fence != -1) {
5324 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5325 close(output.acquire_fence);
5326 if (rc != OK) {
5327 LOGE("sync wait failed %d", rc);
5328 pthread_mutex_unlock(&mMutex);
5329 return rc;
5330 }
5331 }
5332
Emilian Peev0f3c3162017-03-15 12:57:46 +00005333 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5334 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005335 depthRequestPresent = true;
5336 continue;
5337 }
5338
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005339 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005340 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005341
5342 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5343 isVidBufRequested = true;
5344 }
5345 }
5346
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005347 //FIXME: Add checks in validateCaptureRequest to ensure there are no dups
5348 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5349 itr++) {
5350 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5351 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5352 channel->getStreamID(channel->getStreamTypeMask());
5353
5354 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5355 isVidBufRequested = true;
5356 }
5357 }
5358
Thierry Strudel3d639192016-09-09 11:52:26 -07005359 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005360 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005361 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005362 }
5363 if (blob_request && mRawDumpChannel) {
5364 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005365 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005366 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005367 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005368 }
5369
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005370 {
5371 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5372 // Request a RAW buffer if
5373 // 1. mHdrPlusRawSrcChannel is valid.
5374 // 2. frameNumber is a multiple of kHdrPlusRawPeriod (to limit the RAW capture rate).
5375 // 3. There is no pending HDR+ request.
5376 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5377 mHdrPlusPendingRequests.size() == 0) {
5378 streamsArray.stream_request[streamsArray.num_streams].streamID =
5379 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5380 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5381 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005382 }
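// Illustrative cadence (the period value below is hypothetical, not the actual
// kHdrPlusRawPeriod): with a period of 4, only frame numbers 0, 4, 8, ... would add
// the HDR+ RAW stream to streamsArray here, and even those are skipped while an
// HDR+ request is still pending.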
5383
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005384 //extract capture intent
5385 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5386 mCaptureIntent =
5387 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5388 }
5389
5390 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5391 mCacMode =
5392 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5393 }
5394
5395 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005396 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005397
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005398 {
5399 Mutex::Autolock l(gHdrPlusClientLock);
5400 // If this request has a still capture intent, try to submit an HDR+ request.
5401 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5402 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5403 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5404 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005405 }
5406
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005407 if (hdrPlusRequest) {
5408 // For a HDR+ request, just set the frame parameters.
5409 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5410 if (rc < 0) {
5411 LOGE("fail to set frame parameters");
5412 pthread_mutex_unlock(&mMutex);
5413 return rc;
5414 }
5415 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005416 /* Parse the settings:
5417 * - For every request in NORMAL MODE
5418 * - For every request in HFR mode during preview only case
5419 * - For first request of every batch in HFR mode during video
5420 * recording. In batch mode the same settings, except for the frame number,
5421 * are repeated in each request of the batch.
5422 */
5423 if (!mBatchSize ||
5424 (mBatchSize && !isVidBufRequested) ||
5425 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005426 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005427 if (rc < 0) {
5428 LOGE("fail to set frame parameters");
5429 pthread_mutex_unlock(&mMutex);
5430 return rc;
5431 }
5432 }
5433 /* For batch mode HFR, setFrameParameters is not called for every
5434 * request; only the frame number of the latest request is parsed.
5435 * Keep track of the first and last frame numbers in a batch so that
5436 * metadata for all frame numbers of the batch can be duplicated in
5437 * handleBatchMetadata */
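/* Illustrative example (the batch size below is hypothetical): with
 * mBatchSize == 4, frame numbers 100..103 form one batch; mFirstFrameNumberInBatch
 * is set to 100 below, and handleBatchMetadata later replicates the batch metadata
 * across frames 100..103. */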
5438 if (mBatchSize) {
5439 if (!mToBeQueuedVidBufs) {
5440 //start of the batch
5441 mFirstFrameNumberInBatch = request->frame_number;
5442 }
5443 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5444 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5445 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005446 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005447 return BAD_VALUE;
5448 }
5449 }
5450 if (mNeedSensorRestart) {
5451 /* Unlock the mutex as restartSensor waits on the channels to be
5452 * stopped, which in turn calls stream callback functions -
5453 * handleBufferWithLock and handleMetadataWithLock */
5454 pthread_mutex_unlock(&mMutex);
5455 rc = dynamicUpdateMetaStreamInfo();
5456 if (rc != NO_ERROR) {
5457 LOGE("Restarting the sensor failed");
5458 return BAD_VALUE;
5459 }
5460 mNeedSensorRestart = false;
5461 pthread_mutex_lock(&mMutex);
5462 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005463 if(mResetInstantAEC) {
5464 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5465 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5466 mResetInstantAEC = false;
5467 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005468 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005469 if (request->input_buffer->acquire_fence != -1) {
5470 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5471 close(request->input_buffer->acquire_fence);
5472 if (rc != OK) {
5473 LOGE("input buffer sync wait failed %d", rc);
5474 pthread_mutex_unlock(&mMutex);
5475 return rc;
5476 }
5477 }
5478 }
5479
5480 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5481 mLastCustIntentFrmNum = frameNumber;
5482 }
5483 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005484 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005485 pendingRequestIterator latestRequest;
5486 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005487 pendingRequest.num_buffers = depthRequestPresent ?
5488 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 pendingRequest.request_id = request_id;
5490 pendingRequest.blob_request = blob_request;
5491 pendingRequest.timestamp = 0;
5492 pendingRequest.bUrgentReceived = 0;
5493 if (request->input_buffer) {
5494 pendingRequest.input_buffer =
5495 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5496 *(pendingRequest.input_buffer) = *(request->input_buffer);
5497 pInputBuffer = pendingRequest.input_buffer;
5498 } else {
5499 pendingRequest.input_buffer = NULL;
5500 pInputBuffer = NULL;
5501 }
5502
5503 pendingRequest.pipeline_depth = 0;
5504 pendingRequest.partial_result_cnt = 0;
5505 extractJpegMetadata(mCurJpegMeta, request);
5506 pendingRequest.jpegMetadata = mCurJpegMeta;
5507 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5508 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005509 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005510 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5511 mHybridAeEnable =
5512 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5513 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005514
5515 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5516 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005517 /* DevCamDebug metadata processCaptureRequest */
5518 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5519 mDevCamDebugMetaEnable =
5520 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5521 }
5522 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5523 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005524
5525 //extract CAC info
5526 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5527 mCacMode =
5528 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5529 }
5530 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005531 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005532
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005533 // extract enableZsl info
5534 if (gExposeEnableZslKey) {
5535 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5536 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5537 mZslEnabled = pendingRequest.enableZsl;
5538 } else {
5539 pendingRequest.enableZsl = mZslEnabled;
5540 }
5541 }
5542
Thierry Strudel3d639192016-09-09 11:52:26 -07005543 PendingBuffersInRequest bufsForCurRequest;
5544 bufsForCurRequest.frame_number = frameNumber;
5545 // Mark current timestamp for the new request
5546 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005547 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005548
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005549 if (hdrPlusRequest) {
5550 // Save settings for this request.
5551 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5552 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5553
5554 // Add to pending HDR+ request queue.
5555 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5556 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5557
5558 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5559 }
5560
Thierry Strudel3d639192016-09-09 11:52:26 -07005561 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005562 if ((request->output_buffers[i].stream->data_space ==
5563 HAL_DATASPACE_DEPTH) &&
5564 (HAL_PIXEL_FORMAT_BLOB ==
5565 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005566 continue;
5567 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005568 RequestedBufferInfo requestedBuf;
5569 memset(&requestedBuf, 0, sizeof(requestedBuf));
5570 requestedBuf.stream = request->output_buffers[i].stream;
5571 requestedBuf.buffer = NULL;
5572 pendingRequest.buffers.push_back(requestedBuf);
5573
5574 // Add to buffer handle the pending buffers list
5575 PendingBufferInfo bufferInfo;
5576 bufferInfo.buffer = request->output_buffers[i].buffer;
5577 bufferInfo.stream = request->output_buffers[i].stream;
5578 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5579 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5580 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5581 frameNumber, bufferInfo.buffer,
5582 channel->getStreamTypeMask(), bufferInfo.stream->format);
5583 }
5584 // Add this request packet into mPendingBuffersMap
5585 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5586 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5587 mPendingBuffersMap.get_num_overall_buffers());
5588
5589 latestRequest = mPendingRequestsList.insert(
5590 mPendingRequestsList.end(), pendingRequest);
5591 if(mFlush) {
5592 LOGI("mFlush is true");
5593 pthread_mutex_unlock(&mMutex);
5594 return NO_ERROR;
5595 }
5596
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005597 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5598 // channel.
5599 if (!hdrPlusRequest) {
5600 int indexUsed;
5601 // Notify metadata channel we receive a request
5602 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005603
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005604 if(request->input_buffer != NULL){
5605 LOGD("Input request, frame_number %d", frameNumber);
5606 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5607 if (NO_ERROR != rc) {
5608 LOGE("fail to set reproc parameters");
5609 pthread_mutex_unlock(&mMutex);
5610 return rc;
5611 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005612 }
5613
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005614 // Call request on other streams
5615 uint32_t streams_need_metadata = 0;
5616 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5617 for (size_t i = 0; i < request->num_output_buffers; i++) {
5618 const camera3_stream_buffer_t& output = request->output_buffers[i];
5619 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5620
5621 if (channel == NULL) {
5622 LOGW("invalid channel pointer for stream");
5623 continue;
5624 }
5625
5626 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5627 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5628 output.buffer, request->input_buffer, frameNumber);
5629 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005630 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005631 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5632 if (rc < 0) {
5633 LOGE("Fail to request on picture channel");
5634 pthread_mutex_unlock(&mMutex);
5635 return rc;
5636 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005637 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005638 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5639 assert(NULL != mDepthChannel);
5640 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005641
Emilian Peev7650c122017-01-19 08:24:33 -08005642 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5643 if (rc < 0) {
5644 LOGE("Fail to map on depth buffer");
5645 pthread_mutex_unlock(&mMutex);
5646 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005647 }
Emilian Peev7650c122017-01-19 08:24:33 -08005648 } else {
5649 LOGD("snapshot request with buffer %p, frame_number %d",
5650 output.buffer, frameNumber);
5651 if (!request->settings) {
5652 rc = channel->request(output.buffer, frameNumber,
5653 NULL, mPrevParameters, indexUsed);
5654 } else {
5655 rc = channel->request(output.buffer, frameNumber,
5656 NULL, mParameters, indexUsed);
5657 }
5658 if (rc < 0) {
5659 LOGE("Fail to request on picture channel");
5660 pthread_mutex_unlock(&mMutex);
5661 return rc;
5662 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663
Emilian Peev7650c122017-01-19 08:24:33 -08005664 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5665 uint32_t j = 0;
5666 for (j = 0; j < streamsArray.num_streams; j++) {
5667 if (streamsArray.stream_request[j].streamID == streamId) {
5668 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5669 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5670 else
5671 streamsArray.stream_request[j].buf_index = indexUsed;
5672 break;
5673 }
5674 }
5675 if (j == streamsArray.num_streams) {
5676 LOGE("Did not find matching stream to update index");
5677 assert(0);
5678 }
5679
5680 pendingBufferIter->need_metadata = true;
5681 streams_need_metadata++;
5682 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005683 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005684 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5685 bool needMetadata = false;
5686 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5687 rc = yuvChannel->request(output.buffer, frameNumber,
5688 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5689 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005690 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005691 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005692 pthread_mutex_unlock(&mMutex);
5693 return rc;
5694 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005695
5696 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5697 uint32_t j = 0;
5698 for (j = 0; j < streamsArray.num_streams; j++) {
5699 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005700 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5701 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5702 else
5703 streamsArray.stream_request[j].buf_index = indexUsed;
5704 break;
5705 }
5706 }
5707 if (j == streamsArray.num_streams) {
5708 LOGE("Did not find matching stream to update index");
5709 assert(0);
5710 }
5711
5712 pendingBufferIter->need_metadata = needMetadata;
5713 if (needMetadata)
5714 streams_need_metadata += 1;
5715 LOGD("calling YUV channel request, need_metadata is %d",
5716 needMetadata);
5717 } else {
5718 LOGD("request with buffer %p, frame_number %d",
5719 output.buffer, frameNumber);
5720
5721 rc = channel->request(output.buffer, frameNumber, indexUsed);
5722
5723 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5724 uint32_t j = 0;
5725 for (j = 0; j < streamsArray.num_streams; j++) {
5726 if (streamsArray.stream_request[j].streamID == streamId) {
5727 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5728 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5729 else
5730 streamsArray.stream_request[j].buf_index = indexUsed;
5731 break;
5732 }
5733 }
5734 if (j == streamsArray.num_streams) {
5735 LOGE("Did not find matching stream to update index");
5736 assert(0);
5737 }
5738
5739 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5740 && mBatchSize) {
5741 mToBeQueuedVidBufs++;
5742 if (mToBeQueuedVidBufs == mBatchSize) {
5743 channel->queueBatchBuf();
5744 }
5745 }
5746 if (rc < 0) {
5747 LOGE("request failed");
5748 pthread_mutex_unlock(&mMutex);
5749 return rc;
5750 }
5751 }
5752 pendingBufferIter++;
5753 }
5754
5755 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5756 itr++) {
5757 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5758
5759 if (channel == NULL) {
5760 LOGE("invalid channel pointer for stream");
5761 assert(0);
5762 return BAD_VALUE;
5763 }
5764
5765 InternalRequest requestedStream;
5766 requestedStream = (*itr);
5767
5768
5769 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5770 LOGD("snapshot request internally input buffer %p, frame_number %d",
5771 request->input_buffer, frameNumber);
5772 if(request->input_buffer != NULL){
5773 rc = channel->request(NULL, frameNumber,
5774 pInputBuffer, &mReprocMeta, indexUsed, true,
5775 requestedStream.meteringOnly);
5776 if (rc < 0) {
5777 LOGE("Fail to request on picture channel");
5778 pthread_mutex_unlock(&mMutex);
5779 return rc;
5780 }
5781 } else {
5782 LOGD("snapshot request with frame_number %d", frameNumber);
5783 if (!request->settings) {
5784 rc = channel->request(NULL, frameNumber,
5785 NULL, mPrevParameters, indexUsed, true,
5786 requestedStream.meteringOnly);
5787 } else {
5788 rc = channel->request(NULL, frameNumber,
5789 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5790 }
5791 if (rc < 0) {
5792 LOGE("Fail to request on picture channel");
5793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
5796
5797 if ((*itr).meteringOnly != 1) {
5798 requestedStream.need_metadata = 1;
5799 streams_need_metadata++;
5800 }
5801 }
5802
5803 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5804 uint32_t j = 0;
5805 for (j = 0; j < streamsArray.num_streams; j++) {
5806 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005807 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5808 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5809 else
5810 streamsArray.stream_request[j].buf_index = indexUsed;
5811 break;
5812 }
5813 }
5814 if (j == streamsArray.num_streams) {
5815 LOGE("Did not find matching stream to update index");
5816 assert(0);
5817 }
5818
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005819 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005820 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005821 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005822 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005823 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005825 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005826
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005827 //If 2 streams have need_metadata set to true, fail the request unless
5828 //we copy or reference-count the metadata buffer
5829 if (streams_need_metadata > 1) {
5830 LOGE("not supporting request in which two streams requires"
5831 " 2 HAL metadata for reprocessing");
5832 pthread_mutex_unlock(&mMutex);
5833 return -EINVAL;
5834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005835
Emilian Peev7650c122017-01-19 08:24:33 -08005836 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5837 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5838 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5839 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5840 pthread_mutex_unlock(&mMutex);
5841 return BAD_VALUE;
5842 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005843 if (request->input_buffer == NULL) {
5844 /* Set the parameters to backend:
5845 * - For every request in NORMAL MODE
5846 * - For every request in HFR mode during preview only case
5847 * - Once every batch in HFR mode during video recording
5848 */
5849 if (!mBatchSize ||
5850 (mBatchSize && !isVidBufRequested) ||
5851 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5852 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5853 mBatchSize, isVidBufRequested,
5854 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005855
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005856 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5857 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5858 uint32_t m = 0;
5859 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5860 if (streamsArray.stream_request[k].streamID ==
5861 mBatchedStreamsArray.stream_request[m].streamID)
5862 break;
5863 }
5864 if (m == mBatchedStreamsArray.num_streams) {
5865 mBatchedStreamsArray.stream_request\
5866 [mBatchedStreamsArray.num_streams].streamID =
5867 streamsArray.stream_request[k].streamID;
5868 mBatchedStreamsArray.stream_request\
5869 [mBatchedStreamsArray.num_streams].buf_index =
5870 streamsArray.stream_request[k].buf_index;
5871 mBatchedStreamsArray.num_streams =
5872 mBatchedStreamsArray.num_streams + 1;
5873 }
5874 }
5875 streamsArray = mBatchedStreamsArray;
5876 }
5877 /* Update stream id of all the requested buffers */
5878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5879 streamsArray)) {
5880 LOGE("Failed to set stream type mask in the parameters");
5881 return BAD_VALUE;
5882 }
5883
5884 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5885 mParameters);
5886 if (rc < 0) {
5887 LOGE("set_parms failed");
5888 }
5889 /* reset to zero because the batch is queued */
5890 mToBeQueuedVidBufs = 0;
5891 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5892 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5893 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005894 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5895 uint32_t m = 0;
5896 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5897 if (streamsArray.stream_request[k].streamID ==
5898 mBatchedStreamsArray.stream_request[m].streamID)
5899 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005900 }
5901 if (m == mBatchedStreamsArray.num_streams) {
5902 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5903 streamID = streamsArray.stream_request[k].streamID;
5904 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5905 buf_index = streamsArray.stream_request[k].buf_index;
5906 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5907 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005908 }
5909 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005910 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005911 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005912 }
5913
5914 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5915
5916 mState = STARTED;
5917 // Set up a timed condition wait
5918 struct timespec ts;
5919 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005920 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005921 if (rc < 0) {
5922 isValidTimeout = 0;
5923 LOGE("Error reading the real time clock!!");
5924 }
5925 else {
5926 // Use a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005927 int64_t timeout = 5;
5928 {
5929 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5930 // If there is a pending HDR+ request, the following requests may be blocked until the
5931 // HDR+ request is done. So allow a longer timeout.
5932 if (mHdrPlusPendingRequests.size() > 0) {
5933 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5934 }
5935 }
5936 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005937 }
5938 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005939 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005940 (mState != ERROR) && (mState != DEINIT)) {
5941 if (!isValidTimeout) {
5942 LOGD("Blocking on conditional wait");
5943 pthread_cond_wait(&mRequestCond, &mMutex);
5944 }
5945 else {
5946 LOGD("Blocking on timed conditional wait");
5947 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5948 if (rc == ETIMEDOUT) {
5949 rc = -ENODEV;
5950 LOGE("Unblocked on timeout!!!!");
5951 break;
5952 }
5953 }
5954 LOGD("Unblocked");
5955 if (mWokenUpByDaemon) {
5956 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005957 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005958 break;
5959 }
5960 }
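// Note: this wait is released either by the timed wait expiring or when completed
// results decrement mPendingLiveRequest and signal mRequestCond (see
// unblockRequestIfNecessary()); mWokenUpByDaemon appears to mark wakeups that come
// from the backend daemon rather than from returned results.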
5961 pthread_mutex_unlock(&mMutex);
5962
5963 return rc;
5964}
5965
5966/*===========================================================================
5967 * FUNCTION : dump
5968 *
5969 * DESCRIPTION: Dump the HAL state (pending requests, pending buffers and the
5970 * pending frame drop list) to the given file descriptor
5971 * PARAMETERS :
5972 * @fd : file descriptor to write the dump output to
5973 *
5974 * RETURN : None
5975 *==========================================================================*/
5976void QCamera3HardwareInterface::dump(int fd)
5977{
5978 pthread_mutex_lock(&mMutex);
5979 dprintf(fd, "\n Camera HAL3 information Begin \n");
5980
5981 dprintf(fd, "\nNumber of pending requests: %zu \n",
5982 mPendingRequestsList.size());
5983 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5984 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5985 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5986 for(pendingRequestIterator i = mPendingRequestsList.begin();
5987 i != mPendingRequestsList.end(); i++) {
5988 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5989 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5990 i->input_buffer);
5991 }
5992 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5993 mPendingBuffersMap.get_num_overall_buffers());
5994 dprintf(fd, "-------+------------------\n");
5995 dprintf(fd, " Frame | Stream type mask \n");
5996 dprintf(fd, "-------+------------------\n");
5997 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5998 for(auto &j : req.mPendingBufferList) {
5999 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6000 dprintf(fd, " %5d | %11d \n",
6001 req.frame_number, channel->getStreamTypeMask());
6002 }
6003 }
6004 dprintf(fd, "-------+------------------\n");
6005
6006 dprintf(fd, "\nPending frame drop list: %zu\n",
6007 mPendingFrameDropList.size());
6008 dprintf(fd, "-------+-----------\n");
6009 dprintf(fd, " Frame | Stream ID \n");
6010 dprintf(fd, "-------+-----------\n");
6011 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6012 i != mPendingFrameDropList.end(); i++) {
6013 dprintf(fd, " %5d | %9d \n",
6014 i->frame_number, i->stream_ID);
6015 }
6016 dprintf(fd, "-------+-----------\n");
6017
6018 dprintf(fd, "\n Camera HAL3 information End \n");
6019
6020 /* use dumpsys media.camera as trigger to send update debug level event */
6021 mUpdateDebugLevel = true;
6022 pthread_mutex_unlock(&mMutex);
6023 return;
6024}
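// Usage note: this dump is typically reached via `adb shell dumpsys media.camera`,
// which the in-code comment above also relies on as the trigger for refreshing the
// HAL debug log level (mUpdateDebugLevel).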
6025
6026/*===========================================================================
6027 * FUNCTION : flush
6028 *
6029 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6030 * conditionally restarts channels
6031 *
6032 * PARAMETERS :
6033 * @restartChannels: whether to restart all channels after the flush
6034 *
6035 *
6036 * RETURN :
6037 * 0 on success
6038 * Error code on failure
6039 *==========================================================================*/
6040int QCamera3HardwareInterface::flush(bool restartChannels)
6041{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006042 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 int32_t rc = NO_ERROR;
6044
6045 LOGD("Unblocking Process Capture Request");
6046 pthread_mutex_lock(&mMutex);
6047 mFlush = true;
6048 pthread_mutex_unlock(&mMutex);
6049
6050 rc = stopAllChannels();
6051 // unlink the dual camera bundle
6052 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006053 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6054 &m_pDualCamCmdPtr->bundle_info;
6055 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006056 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6057 pthread_mutex_lock(&gCamLock);
6058
6059 if (mIsMainCamera == 1) {
6060 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6061 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006062 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006063 // related session id should be session id of linked session
6064 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6065 } else {
6066 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6067 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006068 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006069 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6070 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006071 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006072 pthread_mutex_unlock(&gCamLock);
6073
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006074 rc = mCameraHandle->ops->set_dual_cam_cmd(
6075 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006076 if (rc < 0) {
6077 LOGE("Dualcam: Unlink failed, but still proceed to close");
6078 }
6079 }
6080
6081 if (rc < 0) {
6082 LOGE("stopAllChannels failed");
6083 return rc;
6084 }
6085 if (mChannelHandle) {
6086 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6087 mChannelHandle);
6088 }
6089
6090 // Reset bundle info
6091 rc = setBundleInfo();
6092 if (rc < 0) {
6093 LOGE("setBundleInfo failed %d", rc);
6094 return rc;
6095 }
6096
6097 // Mutex Lock
6098 pthread_mutex_lock(&mMutex);
6099
6100 // Unblock process_capture_request
6101 mPendingLiveRequest = 0;
6102 pthread_cond_signal(&mRequestCond);
6103
6104 rc = notifyErrorForPendingRequests();
6105 if (rc < 0) {
6106 LOGE("notifyErrorForPendingRequests failed");
6107 pthread_mutex_unlock(&mMutex);
6108 return rc;
6109 }
6110
6111 mFlush = false;
6112
6113 // Start the Streams/Channels
6114 if (restartChannels) {
6115 rc = startAllChannels();
6116 if (rc < 0) {
6117 LOGE("startAllChannels failed");
6118 pthread_mutex_unlock(&mMutex);
6119 return rc;
6120 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006121 if (mChannelHandle) {
6122 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6123 mChannelHandle);
6124 if (rc < 0) {
6125 LOGE("start_channel failed");
6126 pthread_mutex_unlock(&mMutex);
6127 return rc;
6128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006129 }
6130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006131 pthread_mutex_unlock(&mMutex);
6132
6133 return 0;
6134}
6135
6136/*===========================================================================
6137 * FUNCTION : flushPerf
6138 *
6139 * DESCRIPTION: This is the performance-optimized version of flush that does
6140 * not use stream off; instead it flushes the backend directly
6141 *
6142 * PARAMETERS :
6143 *
6144 *
6145 * RETURN : 0 : success
6146 * -EINVAL: input is malformed (device is not valid)
6147 * -ENODEV: if the device has encountered a serious error
6148 *==========================================================================*/
6149int QCamera3HardwareInterface::flushPerf()
6150{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006151 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 int32_t rc = 0;
6153 struct timespec timeout;
6154 bool timed_wait = false;
6155
6156 pthread_mutex_lock(&mMutex);
6157 mFlushPerf = true;
6158 mPendingBuffersMap.numPendingBufsAtFlush =
6159 mPendingBuffersMap.get_num_overall_buffers();
6160 LOGD("Calling flush. Wait for %d buffers to return",
6161 mPendingBuffersMap.numPendingBufsAtFlush);
6162
6163 /* send the flush event to the backend */
6164 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6165 if (rc < 0) {
6166 LOGE("Error in flush: IOCTL failure");
6167 mFlushPerf = false;
6168 pthread_mutex_unlock(&mMutex);
6169 return -ENODEV;
6170 }
6171
6172 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6173 LOGD("No pending buffers in HAL, return flush");
6174 mFlushPerf = false;
6175 pthread_mutex_unlock(&mMutex);
6176 return rc;
6177 }
6178
6179 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006180 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006181 if (rc < 0) {
6182 LOGE("Error reading the real time clock, cannot use timed wait");
6183 } else {
6184 timeout.tv_sec += FLUSH_TIMEOUT;
6185 timed_wait = true;
6186 }
6187
6188 //Block on conditional variable
6189 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6190 LOGD("Waiting on mBuffersCond");
6191 if (!timed_wait) {
6192 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6193 if (rc != 0) {
6194 LOGE("pthread_cond_wait failed due to rc = %s",
6195 strerror(rc));
6196 break;
6197 }
6198 } else {
6199 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6200 if (rc != 0) {
6201 LOGE("pthread_cond_timedwait failed due to rc = %s",
6202 strerror(rc));
6203 break;
6204 }
6205 }
6206 }
6207 if (rc != 0) {
6208 mFlushPerf = false;
6209 pthread_mutex_unlock(&mMutex);
6210 return -ENODEV;
6211 }
6212
6213 LOGD("Received buffers, now safe to return them");
6214
6215 //make sure the channels handle flush
6216 //currently only required for the picture channel to release snapshot resources
6217 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6218 it != mStreamInfo.end(); it++) {
6219 QCamera3Channel *channel = (*it)->channel;
6220 if (channel) {
6221 rc = channel->flush();
6222 if (rc) {
6223 LOGE("Flushing the channels failed with error %d", rc);
6224 // Even though the channel flush failed, we need to continue and
6225 // return the buffers we have to the framework; however, the return
6226 // value will be an error
6227 rc = -ENODEV;
6228 }
6229 }
6230 }
6231
6232 /* notify the frameworks and send errored results */
6233 rc = notifyErrorForPendingRequests();
6234 if (rc < 0) {
6235 LOGE("notifyErrorForPendingRequests failed");
6236 pthread_mutex_unlock(&mMutex);
6237 return rc;
6238 }
6239
6240 //unblock process_capture_request
6241 mPendingLiveRequest = 0;
6242 unblockRequestIfNecessary();
6243
6244 mFlushPerf = false;
6245 pthread_mutex_unlock(&mMutex);
6246 LOGD ("Flush Operation complete. rc = %d", rc);
6247 return rc;
6248}
6249
6250/*===========================================================================
6251 * FUNCTION : handleCameraDeviceError
6252 *
6253 * DESCRIPTION: This function performs an internal flush, notifies the framework
6254 * of the error and updates the state variable.
6255 *
6256 * PARAMETERS : None
6257 *
6258 * RETURN : NO_ERROR on Success
6259 * Error code on failure
6260 *==========================================================================*/
6261int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6262{
6263 int32_t rc = NO_ERROR;
6264
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006265 {
6266 Mutex::Autolock lock(mFlushLock);
6267 pthread_mutex_lock(&mMutex);
6268 if (mState != ERROR) {
6269 //if mState != ERROR, nothing to be done
6270 pthread_mutex_unlock(&mMutex);
6271 return NO_ERROR;
6272 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006273 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006274
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006275 rc = flush(false /* restart channels */);
6276 if (NO_ERROR != rc) {
6277 LOGE("internal flush to handle mState = ERROR failed");
6278 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006279
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006280 pthread_mutex_lock(&mMutex);
6281 mState = DEINIT;
6282 pthread_mutex_unlock(&mMutex);
6283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006284
6285 camera3_notify_msg_t notify_msg;
6286 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6287 notify_msg.type = CAMERA3_MSG_ERROR;
6288 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6289 notify_msg.message.error.error_stream = NULL;
6290 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006291 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006292
6293 return rc;
6294}
6295
6296/*===========================================================================
6297 * FUNCTION : captureResultCb
6298 *
6299 * DESCRIPTION: Callback handler for all capture result
6300 * (streams, as well as metadata)
6301 *
6302 * PARAMETERS :
6303 * @metadata_buf : metadata information (NULL for buffer-only callbacks)
6304 * @buffer : gralloc buffer to be returned to the framework; NULL if metadata
6305 * @frame_number : frame number of the corresponding capture request
6306 * @isInputBuffer : true if this callback is for the request's input buffer
6307 * RETURN : NONE
6308 *==========================================================================*/
6309void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6310 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6311{
6312 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006313 pthread_mutex_lock(&mMutex);
6314 uint8_t batchSize = mBatchSize;
6315 pthread_mutex_unlock(&mMutex);
6316 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 handleBatchMetadata(metadata_buf,
6318 true /* free_and_bufdone_meta_buf */);
6319 } else { /* mBatchSize = 0 */
6320 hdrPlusPerfLock(metadata_buf);
6321 pthread_mutex_lock(&mMutex);
6322 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006323 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006324 true /* last urgent frame of batch metadata */,
6325 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006326 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006327 pthread_mutex_unlock(&mMutex);
6328 }
6329 } else if (isInputBuffer) {
6330 pthread_mutex_lock(&mMutex);
6331 handleInputBufferWithLock(frame_number);
6332 pthread_mutex_unlock(&mMutex);
6333 } else {
6334 pthread_mutex_lock(&mMutex);
6335 handleBufferWithLock(buffer, frame_number);
6336 pthread_mutex_unlock(&mMutex);
6337 }
6338 return;
6339}
6340
6341/*===========================================================================
6342 * FUNCTION : getReprocessibleOutputStreamId
6343 *
6344 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
6345 * i.e. an output or bidirectional stream whose size and format
6346 * match the configured input stream, if one exists.
6347 *
6348 * PARAMETERS :
6349 * @id : return the stream id if found
6350 *
6351 * RETURN : int32_t type of status
6352 * NO_ERROR -- success
6353 * none-zero failure code
6354 *==========================================================================*/
6355int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6356{
6357 /* Check for an output or bidirectional stream with the same size and format
6358 as the input stream, and return that stream's id */
6359 if ((mInputStreamInfo.dim.width > 0) &&
6360 (mInputStreamInfo.dim.height > 0)) {
6361 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6362 it != mStreamInfo.end(); it++) {
6363
6364 camera3_stream_t *stream = (*it)->stream;
6365 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6366 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6367 (stream->format == mInputStreamInfo.format)) {
6368 // Usage flag for an input stream and the source output stream
6369 // may be different.
6370 LOGD("Found reprocessible output stream! %p", *it);
6371 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6372 stream->usage, mInputStreamInfo.usage);
6373
6374 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6375 if (channel != NULL && channel->mStreams[0]) {
6376 id = channel->mStreams[0]->getMyServerID();
6377 return NO_ERROR;
6378 }
6379 }
6380 }
6381 } else {
6382 LOGD("No input stream, so no reprocessible output stream");
6383 }
6384 return NAME_NOT_FOUND;
6385}
6386
6387/*===========================================================================
6388 * FUNCTION : lookupFwkName
6389 *
6390 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6391 * make sure the parameter is correctly propagated
6392 *
6393 * PARAMETERS :
6394 * @arr : map between the two enums
6395 * @len : len of the map
6396 * @hal_name : name of the hal_parm to map
6397 *
6398 * RETURN : int type of status
6399 * fwk_name -- success
6400 * non-zero failure code
6401 *==========================================================================*/
6402template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6403 size_t len, halType hal_name)
6404{
6405
6406 for (size_t i = 0; i < len; i++) {
6407 if (arr[i].hal_name == hal_name) {
6408 return arr[i].fwk_name;
6409 }
6410 }
6411
6412 /* Not being able to find a matching framework type is not necessarily
6413 * an error case. This happens when mm-camera supports more attributes
6414 * than the framework does */
6415 LOGH("Cannot find matching framework type");
6416 return NAME_NOT_FOUND;
6417}
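// Usage sketch (illustrative only -- EXAMPLE_MODE_MAP and backendModeValue are
// hypothetical names for this example, not definitions taken from this file):
//
//     int fwkMode = lookupFwkName(EXAMPLE_MODE_MAP,
//             sizeof(EXAMPLE_MODE_MAP) / sizeof(EXAMPLE_MODE_MAP[0]),
//             backendModeValue);
//     if (fwkMode == NAME_NOT_FOUND) {
//         // Backend value has no framework equivalent; skip reporting it.
//     }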
6418
6419/*===========================================================================
6420 * FUNCTION : lookupHalName
6421 *
6422 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
6423 * make sure the parameter is correctly propagated
6424 *
6425 * PARAMETERS :
6426 * @arr : map between the two enums
6427 * @len : len of the map
6428 * @fwk_name : name of the framework parameter to map
6429 *
6430 * RETURN : int32_t type of status
6431 * hal_name -- success
6432 * non-zero failure code
6433 *==========================================================================*/
6434template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6435 size_t len, fwkType fwk_name)
6436{
6437 for (size_t i = 0; i < len; i++) {
6438 if (arr[i].fwk_name == fwk_name) {
6439 return arr[i].hal_name;
6440 }
6441 }
6442
6443 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6444 return NAME_NOT_FOUND;
6445}
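// Usage sketch (illustrative only -- EXAMPLE_MODE_MAP is a hypothetical map, not one
// defined in this file; ANDROID_CONTROL_AE_MODE_ON is just a sample framework enum):
//
//     int halMode = lookupHalName(EXAMPLE_MODE_MAP,
//             sizeof(EXAMPLE_MODE_MAP) / sizeof(EXAMPLE_MODE_MAP[0]),
//             (uint8_t)ANDROID_CONTROL_AE_MODE_ON);
//     if (halMode == NAME_NOT_FOUND) {
//         // The framework requested a value the backend does not support.
//     }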
6446
6447/*===========================================================================
6448 * FUNCTION : lookupProp
6449 *
6450 * DESCRIPTION: lookup a value by its name
6451 *
6452 * PARAMETERS :
6453 * @arr : map between the two enums
6454 * @len : size of the map
6455 * @name : name to be looked up
6456 *
6457 * RETURN : Value if found
6458 * CAM_CDS_MODE_MAX if not found
6459 *==========================================================================*/
6460template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6461 size_t len, const char *name)
6462{
6463 if (name) {
6464 for (size_t i = 0; i < len; i++) {
6465 if (!strcmp(arr[i].desc, name)) {
6466 return arr[i].val;
6467 }
6468 }
6469 }
6470 return CAM_CDS_MODE_MAX;
6471}
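// Usage sketch (illustrative only -- the property name and EXAMPLE_CDS_MAP are
// assumptions for this example, not taken from this file):
//
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds = lookupProp(EXAMPLE_CDS_MAP,
//             sizeof(EXAMPLE_CDS_MAP) / sizeof(EXAMPLE_CDS_MAP[0]), prop);
//     if (cds == CAM_CDS_MODE_MAX) {
//         // Unknown property string; fall back to a default CDS mode.
//     }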
6472
6473/*===========================================================================
6474 * FUNCTION : translateFromHalMetadata
6475 * DESCRIPTION: Translate HAL/backend metadata into framework camera_metadata_t
6476 *
6477 * PARAMETERS :
6478 * @metadata : metadata information from callback
6479 * @timestamp: metadata buffer timestamp
6480 * @request_id: request id
6481 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006482 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006483 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6484 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006485 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006486 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6487 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006488 *
6489 * RETURN : camera_metadata_t*
6490 * metadata in a format specified by fwk
6491 *==========================================================================*/
6492camera_metadata_t*
6493QCamera3HardwareInterface::translateFromHalMetadata(
6494 metadata_buffer_t *metadata,
6495 nsecs_t timestamp,
6496 int32_t request_id,
6497 const CameraMetadata& jpegMetadata,
6498 uint8_t pipeline_depth,
6499 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006500 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006501 /* DevCamDebug metadata translateFromHalMetadata argument */
6502 uint8_t DevCamDebug_meta_enable,
6503 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006504 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006505 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006506 bool lastMetadataInBatch,
6507 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006508{
6509 CameraMetadata camMetadata;
6510 camera_metadata_t *resultMetadata;
6511
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006512 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006513 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6514 * Timestamp is needed because it's used for shutter notify calculation.
6515 */
6516 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6517 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006518 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006519 }
6520
Thierry Strudel3d639192016-09-09 11:52:26 -07006521 if (jpegMetadata.entryCount())
6522 camMetadata.append(jpegMetadata);
6523
6524 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6525 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6526 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6527 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006528 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006529 if (mBatchSize == 0) {
6530 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6531 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6532 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006533
Samuel Ha68ba5172016-12-15 18:41:12 -08006534 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6535 // Only update DevCamDebug metadata conditionally: non-HFR mode and when it is enabled.
6536 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6537 // DevCamDebug metadata translateFromHalMetadata AF
6538 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6539 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6540 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6541 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6542 }
6543 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6544 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6545 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6546 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6547 }
6548 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6549 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6550 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6551 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6552 }
6553 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6554 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6555 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6556 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6557 }
6558 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6559 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6560 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6561 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6562 }
6563 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6564 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6565 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6566 *DevCamDebug_af_monitor_pdaf_target_pos;
6567 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6568 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6569 }
6570 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6571 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6572 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6573 *DevCamDebug_af_monitor_pdaf_confidence;
6574 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6575 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6576 }
6577 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6578 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6579 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6580 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6581 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6582 }
6583 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6584 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6585 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6586 *DevCamDebug_af_monitor_tof_target_pos;
6587 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6588 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6589 }
6590 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6591 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6592 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6593 *DevCamDebug_af_monitor_tof_confidence;
6594 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6595 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6596 }
6597 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6598 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6599 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6600 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6601 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6602 }
6603 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6604 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6605 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6606 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6607 &fwk_DevCamDebug_af_monitor_type_select, 1);
6608 }
6609 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6610 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6611 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6612 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6613 &fwk_DevCamDebug_af_monitor_refocus, 1);
6614 }
6615 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6616 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6617 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6618 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6619 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6622 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6623 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6624 *DevCamDebug_af_search_pdaf_target_pos;
6625 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6626 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6627 }
6628 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6629 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6630 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6631 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6632 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6633 }
6634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6635 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6636 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6637 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6638 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6639 }
6640 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6641 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6642 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6643 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6644 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6645 }
6646 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6647 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6648 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6649 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6650 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6651 }
6652 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6653 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6654 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6655 *DevCamDebug_af_search_tof_target_pos;
6656 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6657 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6660 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6661 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6662 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6663 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6664 }
6665 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6666 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6667 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6668 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6669 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6670 }
6671 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6672 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6673 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6674 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6675 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6676 }
6677 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6678 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6679 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6680 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6681 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6682 }
6683 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6684 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6685 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6686 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6687 &fwk_DevCamDebug_af_search_type_select, 1);
6688 }
6689 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6690 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6691 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6692 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6693 &fwk_DevCamDebug_af_search_next_pos, 1);
6694 }
6695 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6696 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6697 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6698 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6699 &fwk_DevCamDebug_af_search_target_pos, 1);
6700 }
6701 // DevCamDebug metadata translateFromHalMetadata AEC
6702 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6703 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6704 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6705 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6706 }
6707 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6708 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6709 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6710 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6713 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6714 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6715 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6718 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6719 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6720 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6723 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6724 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6725 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6728 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6729 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6730 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6733 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6734 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6735 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6736 }
6737 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6738 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6739 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6740 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6741 }
Samuel Ha34229982017-02-17 13:51:11 -08006742 // DevCamDebug metadata translateFromHalMetadata zzHDR
6743 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6744 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6745 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6746 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6749 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006750 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006751 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6752 }
6753 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6754 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6755 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6756 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6757 }
6758 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6759 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006760 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006761 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6764 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6765 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6766 *DevCamDebug_aec_hdr_sensitivity_ratio;
6767 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6768 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6769 }
6770 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6771 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6772 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6773 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6774 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6775 }
6776 // DevCamDebug metadata translateFromHalMetadata ADRC
6777 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6778 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6779 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6780 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6781 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6782 }
6783 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6784 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6785 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6786 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6787 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6788 }
6789 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6790 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6791 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6792 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6793 }
6794 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6795 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6796 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6797 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6798 }
6799 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6800 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6801 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6802 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6803 }
6804 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6805 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6806 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6807 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6808 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006809 // DevCamDebug metadata translateFromHalMetadata AWB
6810 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6811 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6812 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6813 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6814 }
6815 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6816 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6817 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6818 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6819 }
6820 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6821 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6822 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6823 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6824 }
6825 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6826 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6827 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6828 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6829 }
6830 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6831 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6832 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6833 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6834 }
6835 }
6836 // atrace_end(ATRACE_TAG_ALWAYS);
6837
Thierry Strudel3d639192016-09-09 11:52:26 -07006838 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6839 int64_t fwk_frame_number = *frame_number;
6840 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6841 }
6842
6843 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6844 int32_t fps_range[2];
6845 fps_range[0] = (int32_t)float_range->min_fps;
6846 fps_range[1] = (int32_t)float_range->max_fps;
6847 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6848 fps_range, 2);
6849 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6850 fps_range[0], fps_range[1]);
6851 }
6852
6853 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6854 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6855 }
6856
6857 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6858        int val = lookupFwkName(SCENE_MODES_MAP,
6859 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6860 *sceneMode);
6861 if (NAME_NOT_FOUND != val) {
6862 uint8_t fwkSceneMode = (uint8_t)val;
6863 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6864 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6865 fwkSceneMode);
6866 }
6867 }
6868
6869 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6870 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6871 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6872 }
6873
6874 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6875 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6876 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6877 }
6878
6879 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6880 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6881 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6882 }
6883
6884 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6885 CAM_INTF_META_EDGE_MODE, metadata) {
6886 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6887 }
6888
6889 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6890 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6891 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6892 }
6893
6894 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6895 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6896 }
6897
6898 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6899 if (0 <= *flashState) {
6900 uint8_t fwk_flashState = (uint8_t) *flashState;
6901 if (!gCamCapability[mCameraId]->flash_available) {
6902 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6903 }
6904 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6905 }
6906 }
6907
6908 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6909 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6910 if (NAME_NOT_FOUND != val) {
6911 uint8_t fwk_flashMode = (uint8_t)val;
6912 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6913 }
6914 }
6915
6916 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6917 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6918 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6919 }
6920
6921 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6922 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6923 }
6924
6925 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6926 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6930 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6931 }
6932
6933 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6934 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6935 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6936 }
6937
6938 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6939 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6940 LOGD("fwk_videoStab = %d", fwk_videoStab);
6941 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6942 } else {
6943        // Regardless of whether video stabilization is supported, CTS expects the EIS
6944        // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
6945 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6946 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006947 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006948 }
6949
6950 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6951 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6952 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6953 }
6954
6955 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6956 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6957 }
6958
Thierry Strudel3d639192016-09-09 11:52:26 -07006959 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6960 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006961 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006962
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006963 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6964 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006965
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006966 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006967 blackLevelAppliedPattern->cam_black_level[0],
6968 blackLevelAppliedPattern->cam_black_level[1],
6969 blackLevelAppliedPattern->cam_black_level[2],
6970 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006971 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6972 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006973
6974#ifndef USE_HAL_3_3
6975 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306976        // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006977        // depth space, i.e. divide by 2^(14 - 10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05306978 fwk_blackLevelInd[0] /= 16.0;
6979 fwk_blackLevelInd[1] /= 16.0;
6980 fwk_blackLevelInd[2] /= 16.0;
6981 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006982 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6983 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006984#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006985 }
6986
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006987#ifndef USE_HAL_3_3
6988 // Fixed whitelevel is used by ISP/Sensor
6989 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6990 &gCamCapability[mCameraId]->white_level, 1);
6991#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006992
6993 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6994 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6995 int32_t scalerCropRegion[4];
6996 scalerCropRegion[0] = hScalerCropRegion->left;
6997 scalerCropRegion[1] = hScalerCropRegion->top;
6998 scalerCropRegion[2] = hScalerCropRegion->width;
6999 scalerCropRegion[3] = hScalerCropRegion->height;
7000
7001 // Adjust crop region from sensor output coordinate system to active
7002 // array coordinate system.
7003 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7004 scalerCropRegion[2], scalerCropRegion[3]);
7005
7006 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7007 }
7008
7009 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7010 LOGD("sensorExpTime = %lld", *sensorExpTime);
7011 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7012 }
7013
7014    IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7015            CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7016        LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7017        camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7018 }
7019
7020 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7021 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7022 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7023 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7024 sensorRollingShutterSkew, 1);
7025 }
7026
7027 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7028 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7029 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7030
7031 //calculate the noise profile based on sensitivity
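        // A note on the layout (an assumption from the public camera metadata
        // description, not re-verified against this HAL's headers): the noise
        // profile is one (S, O) pair per color channel, modeling per-pixel noise
        // variance as approximately S * signal + O, which is why the loop below
        // writes the same pair for every channel.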
7032 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7033 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7034 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7035 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7036 noise_profile[i] = noise_profile_S;
7037 noise_profile[i+1] = noise_profile_O;
7038 }
7039 LOGD("noise model entry (S, O) is (%f, %f)",
7040 noise_profile_S, noise_profile_O);
7041 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7042 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7043 }
7044
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007045#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007046 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007047 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007048 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007049 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007050 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7051 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7052 }
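    // fwk_ispSensitivity starts at 100, the no-boost baseline assumed by
    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST (an assumption taken from the
    // public metadata description), and is scaled by the post-stats sensitivity
    // when that value is reported.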
7053 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007054#endif
7055
Thierry Strudel3d639192016-09-09 11:52:26 -07007056 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7057 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7058 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7059 }
7060
7061 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7062 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7063 *faceDetectMode);
7064 if (NAME_NOT_FOUND != val) {
7065 uint8_t fwk_faceDetectMode = (uint8_t)val;
7066 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7067
7068 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7069 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7070 CAM_INTF_META_FACE_DETECTION, metadata) {
7071 uint8_t numFaces = MIN(
7072 faceDetectionInfo->num_faces_detected, MAX_ROI);
7073 int32_t faceIds[MAX_ROI];
7074 uint8_t faceScores[MAX_ROI];
7075 int32_t faceRectangles[MAX_ROI * 4];
7076 int32_t faceLandmarks[MAX_ROI * 6];
7077 size_t j = 0, k = 0;
7078
7079 for (size_t i = 0; i < numFaces; i++) {
7080 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7081 // Adjust crop region from sensor output coordinate system to active
7082 // array coordinate system.
7083 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7084 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7085 rect.width, rect.height);
7086
7087 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7088 faceRectangles+j, -1);
7089
Jason Lee8ce36fa2017-04-19 19:40:37 -07007090 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7091 "bottom-right (%d, %d)",
7092 faceDetectionInfo->frame_id, i,
7093 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7094 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7095
Thierry Strudel3d639192016-09-09 11:52:26 -07007096 j+= 4;
7097 }
7098 if (numFaces <= 0) {
7099 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7100 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7101 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7102 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7103 }
7104
7105 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7106 numFaces);
7107 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7108 faceRectangles, numFaces * 4U);
7109 if (fwk_faceDetectMode ==
7110 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7111 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7112 CAM_INTF_META_FACE_LANDMARK, metadata) {
7113
7114 for (size_t i = 0; i < numFaces; i++) {
7115 // Map the co-ordinate sensor output coordinate system to active
7116 // array coordinate system.
7117 mCropRegionMapper.toActiveArray(
7118 landmarks->face_landmarks[i].left_eye_center.x,
7119 landmarks->face_landmarks[i].left_eye_center.y);
7120 mCropRegionMapper.toActiveArray(
7121 landmarks->face_landmarks[i].right_eye_center.x,
7122 landmarks->face_landmarks[i].right_eye_center.y);
7123 mCropRegionMapper.toActiveArray(
7124 landmarks->face_landmarks[i].mouth_center.x,
7125 landmarks->face_landmarks[i].mouth_center.y);
7126
7127 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007128
7129 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7130 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7131 faceDetectionInfo->frame_id, i,
7132 faceLandmarks[k + LEFT_EYE_X],
7133 faceLandmarks[k + LEFT_EYE_Y],
7134 faceLandmarks[k + RIGHT_EYE_X],
7135 faceLandmarks[k + RIGHT_EYE_Y],
7136 faceLandmarks[k + MOUTH_X],
7137 faceLandmarks[k + MOUTH_Y]);
7138
Thierry Strudel04e026f2016-10-10 11:27:36 -07007139 k+= TOTAL_LANDMARK_INDICES;
7140 }
7141 } else {
7142 for (size_t i = 0; i < numFaces; i++) {
7143 setInvalidLandmarks(faceLandmarks+k);
7144 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 }
7146 }
7147
Jason Lee49619db2017-04-13 12:07:22 -07007148 for (size_t i = 0; i < numFaces; i++) {
7149 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7150
7151 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7152 faceDetectionInfo->frame_id, i, faceIds[i]);
7153 }
7154
Thierry Strudel3d639192016-09-09 11:52:26 -07007155 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7156 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7157 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007158 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007159 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7160 CAM_INTF_META_FACE_BLINK, metadata) {
7161 uint8_t detected[MAX_ROI];
7162 uint8_t degree[MAX_ROI * 2];
7163 for (size_t i = 0; i < numFaces; i++) {
7164 detected[i] = blinks->blink[i].blink_detected;
7165 degree[2 * i] = blinks->blink[i].left_blink;
7166 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007167
Jason Lee49619db2017-04-13 12:07:22 -07007168 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7169 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7170 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7171 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007172 }
7173 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7174 detected, numFaces);
7175 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7176 degree, numFaces * 2);
7177 }
7178 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7179 CAM_INTF_META_FACE_SMILE, metadata) {
7180 uint8_t degree[MAX_ROI];
7181 uint8_t confidence[MAX_ROI];
7182 for (size_t i = 0; i < numFaces; i++) {
7183 degree[i] = smiles->smile[i].smile_degree;
7184 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007185
Jason Lee49619db2017-04-13 12:07:22 -07007186 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7187 "smile_degree=%d, smile_score=%d",
7188 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007189 }
7190 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7191 degree, numFaces);
7192 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7193 confidence, numFaces);
7194 }
7195 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7196 CAM_INTF_META_FACE_GAZE, metadata) {
7197 int8_t angle[MAX_ROI];
7198 int32_t direction[MAX_ROI * 3];
7199 int8_t degree[MAX_ROI * 2];
7200 for (size_t i = 0; i < numFaces; i++) {
7201 angle[i] = gazes->gaze[i].gaze_angle;
7202 direction[3 * i] = gazes->gaze[i].updown_dir;
7203 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7204 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7205 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7206 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007207
7208 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7209                                "updown_dir=%d, leftright_dir=%d, roll_dir=%d, "
7210 "left_right_gaze=%d, top_bottom_gaze=%d",
7211 faceDetectionInfo->frame_id, i, angle[i],
7212 direction[3 * i], direction[3 * i + 1],
7213 direction[3 * i + 2],
7214 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007215 }
7216 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7217 (uint8_t *)angle, numFaces);
7218 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7219 direction, numFaces * 3);
7220 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7221 (uint8_t *)degree, numFaces * 2);
7222 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007223 }
7224 }
7225 }
7226 }
7227
7228 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7229 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007230 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007231 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007232 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007233
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7235 histogramBins = *histBins;
7236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7237 }
7238
7239 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7241 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007242 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007243
7244 switch (stats_data->type) {
7245 case CAM_HISTOGRAM_TYPE_BAYER:
7246 switch (stats_data->bayer_stats.data_type) {
7247 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007248 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7249 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007250 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007251 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7252 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007253 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007254 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7255 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007256 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007257 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007258 case CAM_STATS_CHANNEL_R:
7259 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007260 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7261 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007262 }
7263 break;
7264 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007265 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007266 break;
7267 }
7268
Shuzhen Wang14415f52016-11-16 18:26:18 -08007269 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007270 }
7271 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007272 }
7273
7274 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7275 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7276 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7277 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7278 }
7279
7280 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7281 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7282 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7283 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7284 }
7285
7286 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7287 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7288 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7289 CAM_MAX_SHADING_MAP_HEIGHT);
7290 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7291 CAM_MAX_SHADING_MAP_WIDTH);
7292 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7293 lensShadingMap->lens_shading, 4U * map_width * map_height);
7294 }
7295
7296 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7297 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7298 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7299 }
7300
7301 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7302 //Populate CAM_INTF_META_TONEMAP_CURVES
7303 /* ch0 = G, ch 1 = B, ch 2 = R*/
7304 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7305 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7306 tonemap->tonemap_points_cnt,
7307 CAM_MAX_TONEMAP_CURVE_SIZE);
7308 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7309 }
7310
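        // Each curve is exported as flattened (input, output) point pairs, which
        // is why tonemap_points_cnt * 2 elements are passed to update() below.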
7311 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7312 &tonemap->curves[0].tonemap_points[0][0],
7313 tonemap->tonemap_points_cnt * 2);
7314
7315 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7316 &tonemap->curves[1].tonemap_points[0][0],
7317 tonemap->tonemap_points_cnt * 2);
7318
7319 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7320 &tonemap->curves[2].tonemap_points[0][0],
7321 tonemap->tonemap_points_cnt * 2);
7322 }
7323
7324 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7325 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7326 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7327 CC_GAIN_MAX);
7328 }
7329
7330 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7331 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7332 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7333 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7334 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7335 }
7336
7337 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7338 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7339 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7340 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7341 toneCurve->tonemap_points_cnt,
7342 CAM_MAX_TONEMAP_CURVE_SIZE);
7343 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7344 }
7345 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7346 (float*)toneCurve->curve.tonemap_points,
7347 toneCurve->tonemap_points_cnt * 2);
7348 }
7349
7350 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7351 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7352 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7353 predColorCorrectionGains->gains, 4);
7354 }
7355
7356 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7357 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7358 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7359 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7360 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7361 }
7362
7363 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7364 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7365 }
7366
7367 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7368 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7369 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7370 }
7371
7372 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7373 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7374 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7375 }
7376
7377 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7378 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7379 *effectMode);
7380 if (NAME_NOT_FOUND != val) {
7381 uint8_t fwk_effectMode = (uint8_t)val;
7382 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7383 }
7384 }
7385
7386 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7387 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7388 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7389 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7390 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7391 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7392 }
7393 int32_t fwk_testPatternData[4];
7394 fwk_testPatternData[0] = testPatternData->r;
7395 fwk_testPatternData[3] = testPatternData->b;
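        // Slots 0 and 3 carry R and B; slots 1 and 2 carry the two green channels.
        // Which HAL green (gr/gb) maps to which slot depends on the sensor's CFA
        // arrangement, handled by the switch below (the framework's expected green
        // ordering is assumed from the public metadata description, not verified here).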
7396 switch (gCamCapability[mCameraId]->color_arrangement) {
7397 case CAM_FILTER_ARRANGEMENT_RGGB:
7398 case CAM_FILTER_ARRANGEMENT_GRBG:
7399 fwk_testPatternData[1] = testPatternData->gr;
7400 fwk_testPatternData[2] = testPatternData->gb;
7401 break;
7402 case CAM_FILTER_ARRANGEMENT_GBRG:
7403 case CAM_FILTER_ARRANGEMENT_BGGR:
7404 fwk_testPatternData[2] = testPatternData->gr;
7405 fwk_testPatternData[1] = testPatternData->gb;
7406 break;
7407 default:
7408 LOGE("color arrangement %d is not supported",
7409 gCamCapability[mCameraId]->color_arrangement);
7410 break;
7411 }
7412 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7413 }
7414
7415 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7416 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7417 }
7418
7419 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7420 String8 str((const char *)gps_methods);
7421 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7422 }
7423
7424 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7425 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7426 }
7427
7428 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7429 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7430 }
7431
7432 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7433 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7434 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7435 }
7436
7437 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7438 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7439 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7440 }
7441
7442 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7443 int32_t fwk_thumb_size[2];
7444 fwk_thumb_size[0] = thumb_size->width;
7445 fwk_thumb_size[1] = thumb_size->height;
7446 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7447 }
7448
7449 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7450 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7451 privateData,
7452 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7453 }
7454
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007455 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007456 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007457 meteringMode, 1);
7458 }
7459
Thierry Strudel54dc9782017-02-15 12:12:10 -08007460 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7461 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7462 LOGD("hdr_scene_data: %d %f\n",
7463 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7464 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7465 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7466 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7467 &isHdr, 1);
7468 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7469 &isHdrConfidence, 1);
7470 }
7471
Thierry Strudel3d639192016-09-09 11:52:26 -07007474 if (metadata->is_tuning_params_valid) {
7475 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7476 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7477 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7478
7479
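        // Blob layout, as serialized below: six uint32_t header fields (data
        // version, then the sensor/VFE/CPP/CAC/mod3 payload sizes) followed by the
        // sensor, VFE, CPP and CAC payloads, each clamped to its TUNING_*_MAX limit.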
7480 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7481 sizeof(uint32_t));
7482 data += sizeof(uint32_t);
7483
7484 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7485 sizeof(uint32_t));
7486 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7487 data += sizeof(uint32_t);
7488
7489 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7490 sizeof(uint32_t));
7491 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7492 data += sizeof(uint32_t);
7493
7494 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7495 sizeof(uint32_t));
7496 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7497 data += sizeof(uint32_t);
7498
7499 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7500 sizeof(uint32_t));
7501 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7502 data += sizeof(uint32_t);
7503
7504 metadata->tuning_params.tuning_mod3_data_size = 0;
7505 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7506 sizeof(uint32_t));
7507 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7508 data += sizeof(uint32_t);
7509
7510 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7511 TUNING_SENSOR_DATA_MAX);
7512 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7513 count);
7514 data += count;
7515
7516 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7517 TUNING_VFE_DATA_MAX);
7518 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7519 count);
7520 data += count;
7521
7522 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7523 TUNING_CPP_DATA_MAX);
7524 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7525 count);
7526 data += count;
7527
7528 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7529 TUNING_CAC_DATA_MAX);
7530 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7531 count);
7532 data += count;
7533
7534 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7535 (int32_t *)(void *)tuning_meta_data_blob,
7536 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7537 }
7538
7539 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7540 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7541 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7542 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7543 NEUTRAL_COL_POINTS);
7544 }
7545
7546 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7547 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7548 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7549 }
7550
7551 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7552 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7553 // Adjust crop region from sensor output coordinate system to active
7554 // array coordinate system.
7555 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7556 hAeRegions->rect.width, hAeRegions->rect.height);
7557
7558 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7559 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7560 REGIONS_TUPLE_COUNT);
7561 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7562 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7563 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7564 hAeRegions->rect.height);
7565 }
7566
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007567 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7568 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7569 if (NAME_NOT_FOUND != val) {
7570 uint8_t fwkAfMode = (uint8_t)val;
7571 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7572 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7573 } else {
7574 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7575 val);
7576 }
7577 }
7578
Thierry Strudel3d639192016-09-09 11:52:26 -07007579 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7580 uint8_t fwk_afState = (uint8_t) *afState;
7581 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007582 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007583 }
7584
7585 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7586 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7587 }
7588
7589 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7590 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7591 }
7592
7593 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7594 uint8_t fwk_lensState = *lensState;
7595 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7596 }
7597
Thierry Strudel3d639192016-09-09 11:52:26 -07007598
7599 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007600 uint32_t ab_mode = *hal_ab_mode;
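        // The framework enum exposes only OFF/50HZ/60HZ/AUTO (an assumption from
        // the public ANDROID_CONTROL_AE_ANTIBANDING_MODE definition), so the
        // HAL-specific AUTO_50HZ/AUTO_60HZ variants are folded back to AUTO before
        // the lookup below.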
7601 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7602 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7603 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007605 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007606 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007607 if (NAME_NOT_FOUND != val) {
7608 uint8_t fwk_ab_mode = (uint8_t)val;
7609 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7610 }
7611 }
7612
7613 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7614 int val = lookupFwkName(SCENE_MODES_MAP,
7615 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7616 if (NAME_NOT_FOUND != val) {
7617 uint8_t fwkBestshotMode = (uint8_t)val;
7618 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7619 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7620 } else {
7621 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7622 }
7623 }
7624
7625 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7626 uint8_t fwk_mode = (uint8_t) *mode;
7627 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7628 }
7629
7630    /* Constant metadata values to be updated */
7631 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7632 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7633
7634 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7635 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7636
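    // The hot pixel map mode is reported as OFF above, so an empty (zero-entry)
    // map is published here to keep the result self-consistent.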
7637 int32_t hotPixelMap[2];
7638 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7639
7640 // CDS
7641 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7642 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7643 }
7644
Thierry Strudel04e026f2016-10-10 11:27:36 -07007645 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7646 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007647 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
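        // mCurrFeatureState caches which features were last reported active, so the
        // PROFILE_META_*_TOGGLED message below fires only on an actual state change
        // rather than on every frame.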
Thierry Strudel04e026f2016-10-10 11:27:36 -07007648 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7649 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7650 } else {
7651 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7652 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007653
7654 if(fwk_hdr != curr_hdr_state) {
7655 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7656 if(fwk_hdr)
7657 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7658 else
7659 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7660 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007661 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7662 }
7663
Thierry Strudel54dc9782017-02-15 12:12:10 -08007664 //binning correction
7665 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7666 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7667 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7668 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7669 }
7670
Thierry Strudel04e026f2016-10-10 11:27:36 -07007671 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007672 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007673 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7674 int8_t is_ir_on = 0;
7675
7676        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7677 if(is_ir_on != curr_ir_state) {
7678 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7679 if(is_ir_on)
7680 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7681 else
7682 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7683 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007684 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007685 }
7686
Thierry Strudel269c81a2016-10-12 12:13:59 -07007687 // AEC SPEED
7688 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7689 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7690 }
7691
7692 // AWB SPEED
7693 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7694 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7695 }
7696
Thierry Strudel3d639192016-09-09 11:52:26 -07007697 // TNR
7698 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7699 uint8_t tnr_enable = tnr->denoise_enable;
7700 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007701 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7702 int8_t is_tnr_on = 0;
7703
7704        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7705 if(is_tnr_on != curr_tnr_state) {
7706 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7707 if(is_tnr_on)
7708 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7709 else
7710 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007712
7713 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7714 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7715 }
7716
7717 // Reprocess crop data
7718 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7719 uint8_t cnt = crop_data->num_of_streams;
7720 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7721            // mm-qcamera-daemon only posts crop_data for streams
7722            // not linked to pproc, so the absence of valid crop metadata
7723            // is not necessarily an error case.
7724 LOGD("No valid crop metadata entries");
7725 } else {
7726 uint32_t reproc_stream_id;
7727 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7728 LOGD("No reprocessible stream found, ignore crop data");
7729 } else {
7730 int rc = NO_ERROR;
7731 Vector<int32_t> roi_map;
7732 int32_t *crop = new int32_t[cnt*4];
7733 if (NULL == crop) {
7734 rc = NO_MEMORY;
7735 }
7736 if (NO_ERROR == rc) {
7737 int32_t streams_found = 0;
7738 for (size_t i = 0; i < cnt; i++) {
7739 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7740 if (pprocDone) {
7741 // HAL already does internal reprocessing,
7742 // either via reprocessing before JPEG encoding,
7743 // or offline postprocessing for pproc bypass case.
7744 crop[0] = 0;
7745 crop[1] = 0;
7746 crop[2] = mInputStreamInfo.dim.width;
7747 crop[3] = mInputStreamInfo.dim.height;
7748 } else {
7749 crop[0] = crop_data->crop_info[i].crop.left;
7750 crop[1] = crop_data->crop_info[i].crop.top;
7751 crop[2] = crop_data->crop_info[i].crop.width;
7752 crop[3] = crop_data->crop_info[i].crop.height;
7753 }
7754 roi_map.add(crop_data->crop_info[i].roi_map.left);
7755 roi_map.add(crop_data->crop_info[i].roi_map.top);
7756 roi_map.add(crop_data->crop_info[i].roi_map.width);
7757 roi_map.add(crop_data->crop_info[i].roi_map.height);
7758 streams_found++;
7759 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7760 crop[0], crop[1], crop[2], crop[3]);
7761 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7762 crop_data->crop_info[i].roi_map.left,
7763 crop_data->crop_info[i].roi_map.top,
7764 crop_data->crop_info[i].roi_map.width,
7765 crop_data->crop_info[i].roi_map.height);
7766 break;
7767
7768 }
7769 }
7770 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7771 &streams_found, 1);
7772 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7773 crop, (size_t)(streams_found * 4));
7774 if (roi_map.array()) {
7775 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7776 roi_map.array(), roi_map.size());
7777 }
7778 }
7779 if (crop) {
7780 delete [] crop;
7781 }
7782 }
7783 }
7784 }
7785
7786 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7787        // Regardless of whether CAC is supported, CTS expects the CAC result to be
7788        // non-NULL, so hardcode the CAC result to OFF mode.
7789 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7790 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7791 } else {
7792 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7793 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7794 *cacMode);
7795 if (NAME_NOT_FOUND != val) {
7796 uint8_t resultCacMode = (uint8_t)val;
7797                // Check whether the CAC result from the callback matches the framework-set
7798                // CAC mode. If not, report the CAC mode that came in the corresponding request.
7799 if (fwk_cacMode != resultCacMode) {
7800 resultCacMode = fwk_cacMode;
7801 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007802 //Check if CAC is disabled by property
7803 if (m_cacModeDisabled) {
7804 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7805 }
7806
Thierry Strudel3d639192016-09-09 11:52:26 -07007807 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7808 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7809 } else {
7810 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7811 }
7812 }
7813 }
7814
7815 // Post blob of cam_cds_data through vendor tag.
7816 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7817 uint8_t cnt = cdsInfo->num_of_streams;
7818 cam_cds_data_t cdsDataOverride;
7819 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7820 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7821 cdsDataOverride.num_of_streams = 1;
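        // Only one entry is exported: the override blob forwards the CDS enable
        // state of the reprocessible output stream (when one is found below), along
        // with the unmodified session-level enable flag.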
7822 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7823 uint32_t reproc_stream_id;
7824 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7825 LOGD("No reprocessible stream found, ignore cds data");
7826 } else {
7827 for (size_t i = 0; i < cnt; i++) {
7828 if (cdsInfo->cds_info[i].stream_id ==
7829 reproc_stream_id) {
7830 cdsDataOverride.cds_info[0].cds_enable =
7831 cdsInfo->cds_info[i].cds_enable;
7832 break;
7833 }
7834 }
7835 }
7836 } else {
7837 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7838 }
7839 camMetadata.update(QCAMERA3_CDS_INFO,
7840 (uint8_t *)&cdsDataOverride,
7841 sizeof(cam_cds_data_t));
7842 }
7843
7844 // Ldaf calibration data
7845 if (!mLdafCalibExist) {
7846 IF_META_AVAILABLE(uint32_t, ldafCalib,
7847 CAM_INTF_META_LDAF_EXIF, metadata) {
7848 mLdafCalibExist = true;
7849 mLdafCalib[0] = ldafCalib[0];
7850 mLdafCalib[1] = ldafCalib[1];
7851 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7852 ldafCalib[0], ldafCalib[1]);
7853 }
7854 }
7855
Thierry Strudel54dc9782017-02-15 12:12:10 -08007856 // EXIF debug data through vendor tag
7857 /*
7858 * Mobicat Mask can assume 3 values:
7859 * 1 refers to Mobicat data,
7860 * 2 refers to Stats Debug and Exif Debug Data
7861 * 3 refers to Mobicat and Stats Debug Data
7862 * We want to make sure that we are sending Exif debug data
7863 * only when Mobicat Mask is 2.
7864 */
7865 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7866 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7867 (uint8_t *)(void *)mExifParams.debug_params,
7868 sizeof(mm_jpeg_debug_exif_params_t));
7869 }
7870
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007871 // Reprocess and DDM debug data through vendor tag
7872 cam_reprocess_info_t repro_info;
7873 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007874 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7875 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007876 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007877 }
7878 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7879 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007880 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
7882 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7883 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007884 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 }
7886 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7887 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007888 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007889 }
7890 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7891 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007892 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007893 }
7894 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007895 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007896 }
7897 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7898 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007899 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007900 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007901 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7902 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7903 }
7904 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7905 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7906 }
7907 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7908 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007909
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007910 // INSTANT AEC MODE
7911 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7912 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7913 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7914 }
7915
Shuzhen Wange763e802016-03-31 10:24:29 -07007916 // AF scene change
7917 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7918 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7919 }
7920
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007921 // Enable ZSL
7922 if (enableZsl != nullptr) {
7923 uint8_t value = *enableZsl ?
7924 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7925 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7926 }
7927
Thierry Strudel3d639192016-09-09 11:52:26 -07007928 resultMetadata = camMetadata.release();
7929 return resultMetadata;
7930}
7931
7932/*===========================================================================
7933 * FUNCTION : saveExifParams
7934 *
7935 * DESCRIPTION: Save 3A and stats EXIF debug parameters from the metadata
7936 *              callback into mExifParams
7936 *
7937 * PARAMETERS :
7938 * @metadata : metadata information from callback
7939 *
7940 * RETURN : none
7941 *
7942 *==========================================================================*/
7943void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7944{
7945 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7946 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7947 if (mExifParams.debug_params) {
7948 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7949 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7950 }
7951 }
7952 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7953 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7954 if (mExifParams.debug_params) {
7955 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7956 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7957 }
7958 }
7959 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7960 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7961 if (mExifParams.debug_params) {
7962 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7963 mExifParams.debug_params->af_debug_params_valid = TRUE;
7964 }
7965 }
7966 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7967 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7968 if (mExifParams.debug_params) {
7969 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7970 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7971 }
7972 }
7973 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7974 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7975 if (mExifParams.debug_params) {
7976 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7977 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7978 }
7979 }
7980 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7981 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7982 if (mExifParams.debug_params) {
7983 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7984 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7985 }
7986 }
7987 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7988 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7989 if (mExifParams.debug_params) {
7990 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7991 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7992 }
7993 }
7994 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7995 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7996 if (mExifParams.debug_params) {
7997 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7998 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7999 }
8000 }
8001}
8002
8003/*===========================================================================
8004 * FUNCTION : get3AExifParams
8005 *
8006 * DESCRIPTION: Return the cached 3A EXIF parameters (mExifParams)
8007 *
8008 * PARAMETERS : none
8009 *
8010 *
8011 * RETURN : mm_jpeg_exif_params_t
8012 *
8013 *==========================================================================*/
8014mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8015{
8016 return mExifParams;
8017}
8018
8019/*===========================================================================
8020 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8021 *
8022 * DESCRIPTION: Translate urgent (partial) metadata from the HAL callback into
8023 *              framework result metadata
8023 *
8024 * PARAMETERS :
8025 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008026 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8027 * urgent metadata in a batch. Always true for
8028 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008029 *
8030 * RETURN : camera_metadata_t*
8031 * metadata in a format specified by fwk
8032 *==========================================================================*/
8033camera_metadata_t*
8034QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008035 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008036{
8037 CameraMetadata camMetadata;
8038 camera_metadata_t *resultMetadata;
8039
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008040 if (!lastUrgentMetadataInBatch) {
8041 /* In batch mode, use empty metadata if this is not the last in batch
8042 */
8043 resultMetadata = allocate_camera_metadata(0, 0);
8044 return resultMetadata;
8045 }
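    // Rationale (illustrative, based on the batching scheme used here): the
    // framework still expects one urgent result per request, so non-final
    // frames of a batch get an empty placeholder buffer, and only the last
    // frame in the batch carries the real 3A state translated below.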
Thierry Strudel3d639192016-09-09 11:52:26 -07008046
8047 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8048 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8049 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8050 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8051 }
8052
8053 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8054 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8055 &aecTrigger->trigger, 1);
8056 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8057 &aecTrigger->trigger_id, 1);
8058 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8059 aecTrigger->trigger);
8060 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8061 aecTrigger->trigger_id);
8062 }
8063
8064 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8065 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8066 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8067 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8068 }
8069
Thierry Strudel3d639192016-09-09 11:52:26 -07008070 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8071 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8072 &af_trigger->trigger, 1);
8073 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8074 af_trigger->trigger);
8075 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8076 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8077 af_trigger->trigger_id);
8078 }
8079
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008080 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8081 /*af regions*/
8082 int32_t afRegions[REGIONS_TUPLE_COUNT];
8083        // Adjust AF region from sensor output coordinate system to active
8084        // array coordinate system.
8085 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8086 hAfRegions->rect.width, hAfRegions->rect.height);
8087
8088 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8089 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8090 REGIONS_TUPLE_COUNT);
8091 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8092 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8093 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8094 hAfRegions->rect.height);
8095 }
8096
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008097 // AF region confidence
8098 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8099 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8100 }
8101
Thierry Strudel3d639192016-09-09 11:52:26 -07008102 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8103 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8104 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8105 if (NAME_NOT_FOUND != val) {
8106 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8107 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8108 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8109 } else {
8110 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8111 }
8112 }
8113
8114 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8115 uint32_t aeMode = CAM_AE_MODE_MAX;
8116 int32_t flashMode = CAM_FLASH_MODE_MAX;
8117 int32_t redeye = -1;
8118 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8119 aeMode = *pAeMode;
8120 }
8121 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8122 flashMode = *pFlashMode;
8123 }
8124 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8125 redeye = *pRedeye;
8126 }
8127
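    // Derive ANDROID_CONTROL_AE_MODE from the HAL hints gathered above, in order
    // of precedence: red-eye reduction first, then auto/on flash, then plain AE
    // on, then AE off, then external flash; anything else is logged as an error.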
8128 if (1 == redeye) {
8129 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8130 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8131 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8132 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8133 flashMode);
8134 if (NAME_NOT_FOUND != val) {
8135 fwk_aeMode = (uint8_t)val;
8136 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8137 } else {
8138 LOGE("Unsupported flash mode %d", flashMode);
8139 }
8140 } else if (aeMode == CAM_AE_MODE_ON) {
8141 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8142 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8143 } else if (aeMode == CAM_AE_MODE_OFF) {
8144 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8145 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008146 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8147 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8148 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008149 } else {
8150 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8151 "flashMode:%d, aeMode:%u!!!",
8152 redeye, flashMode, aeMode);
8153 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008154 if (mInstantAEC) {
8155        // Increment frame index count until the instant AEC bound is reached.
8156 mInstantAecFrameIdxCount++;
8157 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8158 CAM_INTF_META_AEC_INFO, metadata) {
8159 LOGH("ae_params->settled = %d",ae_params->settled);
8160            // If AEC has settled, or the number of frames has reached the bound,
8161            // reset instant AEC.
8162 if (ae_params->settled ||
8163 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8164 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8165 mInstantAEC = false;
8166 mResetInstantAEC = true;
8167 mInstantAecFrameIdxCount = 0;
8168 }
8169 }
8170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008171 resultMetadata = camMetadata.release();
8172 return resultMetadata;
8173}
8174
8175/*===========================================================================
8176 * FUNCTION : dumpMetadataToFile
8177 *
8178 * DESCRIPTION: Dumps tuning metadata to file system
8179 *
8180 * PARAMETERS :
8181 * @meta : tuning metadata
8182 * @dumpFrameCount : current dump frame count
8183 * @enabled : Enable mask
8184 *
8185 *==========================================================================*/
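// Dump file layout produced by this function: six uint32_t header fields
// (tuning_data_version, then the sensor/VFE/CPP/CAC/mod3 data sizes, with the
// mod3 size forced to 0), followed by the sensor, VFE, CPP and CAC payloads
// copied from their fixed offsets inside meta.data.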
8186void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8187 uint32_t &dumpFrameCount,
8188 bool enabled,
8189 const char *type,
8190 uint32_t frameNumber)
8191{
8192 //Some sanity checks
8193 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8194 LOGE("Tuning sensor data size bigger than expected %d: %d",
8195 meta.tuning_sensor_data_size,
8196 TUNING_SENSOR_DATA_MAX);
8197 return;
8198 }
8199
8200 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8201 LOGE("Tuning VFE data size bigger than expected %d: %d",
8202 meta.tuning_vfe_data_size,
8203 TUNING_VFE_DATA_MAX);
8204 return;
8205 }
8206
8207 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8208 LOGE("Tuning CPP data size bigger than expected %d: %d",
8209 meta.tuning_cpp_data_size,
8210 TUNING_CPP_DATA_MAX);
8211 return;
8212 }
8213
8214 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8215 LOGE("Tuning CAC data size bigger than expected %d: %d",
8216 meta.tuning_cac_data_size,
8217 TUNING_CAC_DATA_MAX);
8218 return;
8219 }
8220 //
8221
8222 if(enabled){
8223 char timeBuf[FILENAME_MAX];
8224 char buf[FILENAME_MAX];
8225 memset(buf, 0, sizeof(buf));
8226 memset(timeBuf, 0, sizeof(timeBuf));
8227 time_t current_time;
8228 struct tm * timeinfo;
8229 time (&current_time);
8230 timeinfo = localtime (&current_time);
8231 if (timeinfo != NULL) {
8232 strftime (timeBuf, sizeof(timeBuf),
8233 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8234 }
8235 String8 filePath(timeBuf);
8236 snprintf(buf,
8237 sizeof(buf),
8238 "%dm_%s_%d.bin",
8239 dumpFrameCount,
8240 type,
8241 frameNumber);
8242 filePath.append(buf);
8243 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8244 if (file_fd >= 0) {
8245 ssize_t written_len = 0;
8246 meta.tuning_data_version = TUNING_DATA_VERSION;
8247 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8248 written_len += write(file_fd, data, sizeof(uint32_t));
8249 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8250 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8251 written_len += write(file_fd, data, sizeof(uint32_t));
8252 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8253 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8254 written_len += write(file_fd, data, sizeof(uint32_t));
8255 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8256 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8257 written_len += write(file_fd, data, sizeof(uint32_t));
8258 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8259 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8260 written_len += write(file_fd, data, sizeof(uint32_t));
8261 meta.tuning_mod3_data_size = 0;
8262 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8263 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8264 written_len += write(file_fd, data, sizeof(uint32_t));
8265 size_t total_size = meta.tuning_sensor_data_size;
8266 data = (void *)((uint8_t *)&meta.data);
8267 written_len += write(file_fd, data, total_size);
8268 total_size = meta.tuning_vfe_data_size;
8269 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8270 written_len += write(file_fd, data, total_size);
8271 total_size = meta.tuning_cpp_data_size;
8272 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8273 written_len += write(file_fd, data, total_size);
8274 total_size = meta.tuning_cac_data_size;
8275 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8276 written_len += write(file_fd, data, total_size);
8277 close(file_fd);
8278 }else {
8279 LOGE("fail to open file for metadata dumping");
8280 }
8281 }
8282}
8283
8284/*===========================================================================
8285 * FUNCTION : cleanAndSortStreamInfo
8286 *
8287 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8288 *              and sort them such that raw streams are at the end of the list.
8289 *              This is a workaround for a camera daemon constraint.
8290 *
8291 * PARAMETERS : None
8292 *
8293 *==========================================================================*/
8294void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8295{
8296 List<stream_info_t *> newStreamInfo;
8297
8298 /*clean up invalid streams*/
8299 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8300 it != mStreamInfo.end();) {
8301 if(((*it)->status) == INVALID){
8302 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8303 delete channel;
8304 free(*it);
8305 it = mStreamInfo.erase(it);
8306 } else {
8307 it++;
8308 }
8309 }
8310
8311 // Move preview/video/callback/snapshot streams into newList
8312 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8313 it != mStreamInfo.end();) {
8314 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8315 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8316 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8317 newStreamInfo.push_back(*it);
8318 it = mStreamInfo.erase(it);
8319 } else
8320 it++;
8321 }
8322 // Move raw streams into newList
8323 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8324 it != mStreamInfo.end();) {
8325 newStreamInfo.push_back(*it);
8326 it = mStreamInfo.erase(it);
8327 }
8328
8329 mStreamInfo = newStreamInfo;
8330}
8331
8332/*===========================================================================
8333 * FUNCTION : extractJpegMetadata
8334 *
8335 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8336 *              JPEG metadata is cached in HAL, and returned as part of the capture
8337 *              result when metadata is received from the camera daemon.
8338 *
8339 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8340 * @request: capture request
8341 *
8342 *==========================================================================*/
8343void QCamera3HardwareInterface::extractJpegMetadata(
8344 CameraMetadata& jpegMetadata,
8345 const camera3_capture_request_t *request)
8346{
8347 CameraMetadata frame_settings;
8348 frame_settings = request->settings;
8349
8350 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8351 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8352 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8353 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8354
8355 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8356 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8357 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8358 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8359
8360 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8361 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8362 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8363 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8364
8365 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8366 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8367 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8368 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8369
8370 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8371 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8372 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8373 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8374
8375 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8376 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8377 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8378 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8379
8380 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8381 int32_t thumbnail_size[2];
8382 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8383 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8384 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8385 int32_t orientation =
8386 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008387 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008388 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8389 int32_t temp;
8390 temp = thumbnail_size[0];
8391 thumbnail_size[0] = thumbnail_size[1];
8392 thumbnail_size[1] = temp;
8393 }
8394 }
8395 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8396 thumbnail_size,
8397 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8398 }
8399
8400}
8401
8402/*===========================================================================
8403 * FUNCTION : convertToRegions
8404 *
8405 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8406 *
8407 * PARAMETERS :
8408 * @rect : cam_rect_t struct to convert
8409 * @region : int32_t destination array
8410 * @weight : if we are converting from cam_area_t, weight is valid
8411 * else weight = -1
8412 *
8413 *==========================================================================*/
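// Example (illustrative values): a cam_rect_t of {left=100, top=200, width=300,
// height=400} with weight=1 becomes the framework tuple [100, 200, 400, 600, 1],
// i.e. [left, top, right, bottom, weight].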
8414void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8415 int32_t *region, int weight)
8416{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008417 region[FACE_LEFT] = rect.left;
8418 region[FACE_TOP] = rect.top;
8419 region[FACE_RIGHT] = rect.left + rect.width;
8420 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008421 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008422 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008423 }
8424}
8425
8426/*===========================================================================
8427 * FUNCTION : convertFromRegions
8428 *
8429 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8430 *
8431 * PARAMETERS :
8432 *   @roi            : cam_area_t destination struct
8433 *   @frame_settings : framework capture request settings
8434 *   @tag            : metadata tag whose 5-element array is
8435 *                     [x_min, y_min, x_max, y_max, weight]
8436 *
8437 *==========================================================================*/
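// Example (illustrative values): a framework region array of
// [x_min=100, y_min=200, x_max=400, y_max=600, weight=1] read from @tag yields
// roi.rect = {left=100, top=200, width=300, height=400} and roi.weight = 1,
// i.e. the inverse of convertToRegions().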
8438void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008439 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008440{
Thierry Strudel3d639192016-09-09 11:52:26 -07008441 int32_t x_min = frame_settings.find(tag).data.i32[0];
8442 int32_t y_min = frame_settings.find(tag).data.i32[1];
8443 int32_t x_max = frame_settings.find(tag).data.i32[2];
8444 int32_t y_max = frame_settings.find(tag).data.i32[3];
8445 roi.weight = frame_settings.find(tag).data.i32[4];
8446 roi.rect.left = x_min;
8447 roi.rect.top = y_min;
8448 roi.rect.width = x_max - x_min;
8449 roi.rect.height = y_max - y_min;
8450}
8451
8452/*===========================================================================
8453 * FUNCTION : resetIfNeededROI
8454 *
8455 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8456 * crop region
8457 *
8458 * PARAMETERS :
8459 * @roi : cam_area_t struct to resize
8460 * @scalerCropRegion : cam_crop_region_t region to compare against
8461 *
8462 *
8463 *==========================================================================*/
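// Example (illustrative values, weight > 0): with a scaler crop region of
// {left=0, top=0, width=1000, height=750} and an ROI of {left=900, top=700,
// width=300, height=200}, the ROI overlaps the crop, so it is clamped to
// {left=900, top=700, width=100, height=50} and true is returned; an ROI lying
// entirely outside the crop is left untouched and false is returned.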
8464bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8465 const cam_crop_region_t* scalerCropRegion)
8466{
8467 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8468 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8469 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8470 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8471
8472    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8473     * Without this check, the validation below (whether the ROI lies inside the
8474     * scaler crop region) would fail, the ROI would not be reset, and the
8475     * algorithm would keep using a stale ROI window.
8476     */
8477 if (roi->weight == 0) {
8478 return true;
8479 }
8480
8481 if ((roi_x_max < scalerCropRegion->left) ||
8482 // right edge of roi window is left of scalar crop's left edge
8483 (roi_y_max < scalerCropRegion->top) ||
8484 // bottom edge of roi window is above scalar crop's top edge
8485 (roi->rect.left > crop_x_max) ||
8486 // left edge of roi window is beyond(right) of scalar crop's right edge
8487 (roi->rect.top > crop_y_max)){
8488        // top edge of roi window is beyond (below) scalar crop's bottom edge
8489 return false;
8490 }
8491 if (roi->rect.left < scalerCropRegion->left) {
8492 roi->rect.left = scalerCropRegion->left;
8493 }
8494 if (roi->rect.top < scalerCropRegion->top) {
8495 roi->rect.top = scalerCropRegion->top;
8496 }
8497 if (roi_x_max > crop_x_max) {
8498 roi_x_max = crop_x_max;
8499 }
8500 if (roi_y_max > crop_y_max) {
8501 roi_y_max = crop_y_max;
8502 }
8503 roi->rect.width = roi_x_max - roi->rect.left;
8504 roi->rect.height = roi_y_max - roi->rect.top;
8505 return true;
8506}
8507
8508/*===========================================================================
8509 * FUNCTION : convertLandmarks
8510 *
8511 * DESCRIPTION: helper method to extract the landmarks from face detection info
8512 *
8513 * PARAMETERS :
8514 * @landmark_data : input landmark data to be converted
8515 * @landmarks : int32_t destination array
8516 *
8517 *
8518 *==========================================================================*/
8519void QCamera3HardwareInterface::convertLandmarks(
8520 cam_face_landmarks_info_t landmark_data,
8521 int32_t *landmarks)
8522{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008523 if (landmark_data.is_left_eye_valid) {
8524 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8525 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8526 } else {
8527 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8528 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8529 }
8530
8531 if (landmark_data.is_right_eye_valid) {
8532 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8533 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8534 } else {
8535 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8536 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8537 }
8538
8539 if (landmark_data.is_mouth_valid) {
8540 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8541 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8542 } else {
8543 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8544 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8545 }
8546}
8547
8548/*===========================================================================
8549 * FUNCTION : setInvalidLandmarks
8550 *
8551 * DESCRIPTION: helper method to set invalid landmarks
8552 *
8553 * PARAMETERS :
8554 * @landmarks : int32_t destination array
8555 *
8556 *
8557 *==========================================================================*/
8558void QCamera3HardwareInterface::setInvalidLandmarks(
8559 int32_t *landmarks)
8560{
8561 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8562 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8563 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8564 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8565 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8566 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008567}
8568
8569#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008570
8571/*===========================================================================
8572 * FUNCTION : getCapabilities
8573 *
8574 * DESCRIPTION: query camera capability from back-end
8575 *
8576 * PARAMETERS :
8577 * @ops : mm-interface ops structure
8578 * @cam_handle : camera handle for which we need capability
8579 *
8580 * RETURN : ptr type of capability structure
8581 * capability for success
8582 * NULL for failure
8583 *==========================================================================*/
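// Overall flow: allocate a one-block capability heap, map it to the backend via
// CAM_MAPPING_BUF_TYPE_CAPABILITY, call query_capability() so the backend fills
// the mapped buffer, copy the result into a malloc'd cam_capability_t, then
// unmap and release the heap, returning the copy (or NULL on any failure).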
8584cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8585 uint32_t cam_handle)
8586{
8587 int rc = NO_ERROR;
8588 QCamera3HeapMemory *capabilityHeap = NULL;
8589 cam_capability_t *cap_ptr = NULL;
8590
8591 if (ops == NULL) {
8592 LOGE("Invalid arguments");
8593 return NULL;
8594 }
8595
8596 capabilityHeap = new QCamera3HeapMemory(1);
8597 if (capabilityHeap == NULL) {
8598 LOGE("creation of capabilityHeap failed");
8599 return NULL;
8600 }
8601
8602 /* Allocate memory for capability buffer */
8603 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8604 if(rc != OK) {
8605        LOGE("No memory for capability");
8606 goto allocate_failed;
8607 }
8608
8609 /* Map memory for capability buffer */
8610 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8611
8612 rc = ops->map_buf(cam_handle,
8613 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8614 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8615 if(rc < 0) {
8616 LOGE("failed to map capability buffer");
8617 rc = FAILED_TRANSACTION;
8618 goto map_failed;
8619 }
8620
8621 /* Query Capability */
8622 rc = ops->query_capability(cam_handle);
8623 if(rc < 0) {
8624 LOGE("failed to query capability");
8625 rc = FAILED_TRANSACTION;
8626 goto query_failed;
8627 }
8628
8629 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8630 if (cap_ptr == NULL) {
8631 LOGE("out of memory");
8632 rc = NO_MEMORY;
8633 goto query_failed;
8634 }
8635
8636 memset(cap_ptr, 0, sizeof(cam_capability_t));
8637 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8638
8639 int index;
8640 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8641 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8642 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8643 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8644 }
8645
8646query_failed:
8647 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8648map_failed:
8649 capabilityHeap->deallocate();
8650allocate_failed:
8651 delete capabilityHeap;
8652
8653 if (rc != NO_ERROR) {
8654 return NULL;
8655 } else {
8656 return cap_ptr;
8657 }
8658}
8659
Thierry Strudel3d639192016-09-09 11:52:26 -07008660/*===========================================================================
8661 * FUNCTION : initCapabilities
8662 *
8663 * DESCRIPTION: initialize camera capabilities in static data struct
8664 *
8665 * PARAMETERS :
8666 * @cameraId : camera Id
8667 *
8668 * RETURN : int32_t type of status
8669 * NO_ERROR -- success
8670 * none-zero failure code
8671 *==========================================================================*/
8672int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8673{
8674 int rc = 0;
8675 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008676 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008677
8678 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8679 if (rc) {
8680 LOGE("camera_open failed. rc = %d", rc);
8681 goto open_failed;
8682 }
8683 if (!cameraHandle) {
8684 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8685 goto open_failed;
8686 }
8687
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008688 handle = get_main_camera_handle(cameraHandle->camera_handle);
8689 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8690 if (gCamCapability[cameraId] == NULL) {
8691 rc = FAILED_TRANSACTION;
8692 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008693 }
8694
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008695 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008696 if (is_dual_camera_by_idx(cameraId)) {
8697 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8698 gCamCapability[cameraId]->aux_cam_cap =
8699 getCapabilities(cameraHandle->ops, handle);
8700 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8701 rc = FAILED_TRANSACTION;
8702 free(gCamCapability[cameraId]);
8703 goto failed_op;
8704 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008705
8706 // Copy the main camera capability to main_cam_cap struct
8707 gCamCapability[cameraId]->main_cam_cap =
8708 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8709 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8710 LOGE("out of memory");
8711 rc = NO_MEMORY;
8712 goto failed_op;
8713 }
8714 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8715 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008716 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008717failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008718 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8719 cameraHandle = NULL;
8720open_failed:
8721 return rc;
8722}
8723
8724/*==========================================================================
8725 * FUNCTION : get3Aversion
8726 *
8727 * DESCRIPTION: get the Q3A S/W version
8728 *
8729 * PARAMETERS :
8730 * @sw_version: Reference of Q3A structure which will hold version info upon
8731 * return
8732 *
8733 * RETURN : None
8734 *
8735 *==========================================================================*/
8736void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8737{
8738 if(gCamCapability[mCameraId])
8739 sw_version = gCamCapability[mCameraId]->q3a_version;
8740 else
8741 LOGE("Capability structure NULL!");
8742}
8743
8744
8745/*===========================================================================
8746 * FUNCTION : initParameters
8747 *
8748 * DESCRIPTION: initialize camera parameters
8749 *
8750 * PARAMETERS :
8751 *
8752 * RETURN : int32_t type of status
8753 * NO_ERROR -- success
8754 * none-zero failure code
8755 *==========================================================================*/
8756int QCamera3HardwareInterface::initParameters()
8757{
8758 int rc = 0;
8759
8760 //Allocate Set Param Buffer
8761 mParamHeap = new QCamera3HeapMemory(1);
8762 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8763 if(rc != OK) {
8764 rc = NO_MEMORY;
8765 LOGE("Failed to allocate SETPARM Heap memory");
8766 delete mParamHeap;
8767 mParamHeap = NULL;
8768 return rc;
8769 }
8770
8771 //Map memory for parameters buffer
8772 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8773 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8774 mParamHeap->getFd(0),
8775 sizeof(metadata_buffer_t),
8776 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8777 if(rc < 0) {
8778 LOGE("failed to map SETPARM buffer");
8779 rc = FAILED_TRANSACTION;
8780 mParamHeap->deallocate();
8781 delete mParamHeap;
8782 mParamHeap = NULL;
8783 return rc;
8784 }
8785
8786 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8787
8788 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8789 return rc;
8790}
8791
8792/*===========================================================================
8793 * FUNCTION : deinitParameters
8794 *
8795 * DESCRIPTION: de-initialize camera parameters
8796 *
8797 * PARAMETERS :
8798 *
8799 * RETURN : NONE
8800 *==========================================================================*/
8801void QCamera3HardwareInterface::deinitParameters()
8802{
8803 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8804 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8805
8806 mParamHeap->deallocate();
8807 delete mParamHeap;
8808 mParamHeap = NULL;
8809
8810 mParameters = NULL;
8811
8812 free(mPrevParameters);
8813 mPrevParameters = NULL;
8814}
8815
8816/*===========================================================================
8817 * FUNCTION : calcMaxJpegSize
8818 *
8819 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8820 *
8821 * PARAMETERS :
8822 *
8823 * RETURN : max_jpeg_size
8824 *==========================================================================*/
8825size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8826{
8827 size_t max_jpeg_size = 0;
8828 size_t temp_width, temp_height;
8829 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8830 MAX_SIZES_CNT);
8831 for (size_t i = 0; i < count; i++) {
8832 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8833 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8834 if (temp_width * temp_height > max_jpeg_size ) {
8835 max_jpeg_size = temp_width * temp_height;
8836 }
8837 }
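    // Size the worst-case JPEG buffer at 1.5x the largest pixel count (a
    // heuristic upper bound for compressed output plus headers) and leave room
    // for the camera3_jpeg_blob_t marker expected at the end of the buffer.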
8838 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8839 return max_jpeg_size;
8840}
8841
8842/*===========================================================================
8843 * FUNCTION : getMaxRawSize
8844 *
8845 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8846 *
8847 * PARAMETERS :
8848 *
8849 * RETURN : Largest supported Raw Dimension
8850 *==========================================================================*/
8851cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8852{
8853 int max_width = 0;
8854 cam_dimension_t maxRawSize;
8855
8856 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8857 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8858 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8859 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8860 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8861 }
8862 }
8863 return maxRawSize;
8864}
8865
8866
8867/*===========================================================================
8868 * FUNCTION : calcMaxJpegDim
8869 *
8870 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8871 *
8872 * PARAMETERS :
8873 *
8874 * RETURN : max_jpeg_dim
8875 *==========================================================================*/
8876cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8877{
8878 cam_dimension_t max_jpeg_dim;
8879 cam_dimension_t curr_jpeg_dim;
8880 max_jpeg_dim.width = 0;
8881 max_jpeg_dim.height = 0;
8882 curr_jpeg_dim.width = 0;
8883 curr_jpeg_dim.height = 0;
8884 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8885 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8886 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8887 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8888 max_jpeg_dim.width * max_jpeg_dim.height ) {
8889 max_jpeg_dim.width = curr_jpeg_dim.width;
8890 max_jpeg_dim.height = curr_jpeg_dim.height;
8891 }
8892 }
8893 return max_jpeg_dim;
8894}
8895
8896/*===========================================================================
8897 * FUNCTION : addStreamConfig
8898 *
8899 * DESCRIPTION: adds the stream configuration to the array
8900 *
8901 * PARAMETERS :
8902 * @available_stream_configs : pointer to stream configuration array
8903 * @scalar_format : scalar format
8904 * @dim : configuration dimension
8905 * @config_type : input or output configuration type
8906 *
8907 * RETURN : NONE
8908 *==========================================================================*/
8909void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8910 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8911{
8912 available_stream_configs.add(scalar_format);
8913 available_stream_configs.add(dim.width);
8914 available_stream_configs.add(dim.height);
8915 available_stream_configs.add(config_type);
8916}
8917
8918/*===========================================================================
8919 * FUNCTION : suppportBurstCapture
8920 * FUNCTION   : supportBurstCapture
8921 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8922 *
8923 * PARAMETERS :
8924 * @cameraId : camera Id
8925 *
8926 * RETURN : true if camera supports BURST_CAPTURE
8927 * false otherwise
8928 *==========================================================================*/
8929bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8930{
8931 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8932 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8933 const int32_t highResWidth = 3264;
8934 const int32_t highResHeight = 2448;
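    // 3264x2448 is roughly 8 MP; the checks below follow the BURST_CAPTURE
    // guideline that ~8 MP output should sustain at least 20 fps (and full
    // resolution at least 10 fps), matching the duration bounds above.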
8935
8936 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8937 // Maximum resolution images cannot be captured at >= 10fps
8938 // -> not supporting BURST_CAPTURE
8939 return false;
8940 }
8941
8942 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8943 // Maximum resolution images can be captured at >= 20fps
8944 // --> supporting BURST_CAPTURE
8945 return true;
8946 }
8947
8948 // Find the smallest highRes resolution, or largest resolution if there is none
8949 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8950 MAX_SIZES_CNT);
8951 size_t highRes = 0;
8952 while ((highRes + 1 < totalCnt) &&
8953 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8954 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8955 highResWidth * highResHeight)) {
8956 highRes++;
8957 }
8958 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8959 return true;
8960 } else {
8961 return false;
8962 }
8963}
8964
8965/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008966 * FUNCTION : getPDStatIndex
8967 *
8968 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8969 *
8970 * PARAMETERS :
8971 * @caps : camera capabilities
8972 *
8973 * RETURN : int32_t type
8974 * non-negative - on success
8975 * -1 - on failure
8976 *==========================================================================*/
8977int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8978 if (nullptr == caps) {
8979 return -1;
8980 }
8981
8982 uint32_t metaRawCount = caps->meta_raw_channel_count;
8983 int32_t ret = -1;
8984 for (size_t i = 0; i < metaRawCount; i++) {
8985 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8986 ret = i;
8987 break;
8988 }
8989 }
8990
8991 return ret;
8992}
8993
8994/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008995 * FUNCTION : initStaticMetadata
8996 *
8997 * DESCRIPTION: initialize the static metadata
8998 *
8999 * PARAMETERS :
9000 * @cameraId : camera Id
9001 *
9002 * RETURN : int32_t type of status
9003 * 0 -- success
9004 * non-zero failure code
9005 *==========================================================================*/
9006int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9007{
9008 int rc = 0;
9009 CameraMetadata staticInfo;
9010 size_t count = 0;
9011 bool limitedDevice = false;
9012 char prop[PROPERTY_VALUE_MAX];
9013 bool supportBurst = false;
9014
9015 supportBurst = supportBurstCapture(cameraId);
9016
9017    /* If the sensor is a YUV sensor (no raw support), if per-frame control is not
9018     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
9019     * advertised as a LIMITED device */
9020 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9021 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9022 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9023 !supportBurst;
9024
9025 uint8_t supportedHwLvl = limitedDevice ?
9026 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009027#ifndef USE_HAL_3_3
9028 // LEVEL_3 - This device will support level 3.
9029 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9030#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009031 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009032#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009033
9034 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9035 &supportedHwLvl, 1);
9036
9037 bool facingBack = false;
9038 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9039 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9040 facingBack = true;
9041 }
9042 /*HAL 3 only*/
9043 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9044 &gCamCapability[cameraId]->min_focus_distance, 1);
9045
9046 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9047 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9048
9049 /*should be using focal lengths but sensor doesn't provide that info now*/
9050 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9051 &gCamCapability[cameraId]->focal_length,
9052 1);
9053
9054 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9055 gCamCapability[cameraId]->apertures,
9056 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9057
9058 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9059 gCamCapability[cameraId]->filter_densities,
9060 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9061
9062
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009063 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9064 size_t mode_count =
9065 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9066 for (size_t i = 0; i < mode_count; i++) {
9067 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9068 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009069 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009070 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009071
9072 int32_t lens_shading_map_size[] = {
9073 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9074 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9075 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9076 lens_shading_map_size,
9077 sizeof(lens_shading_map_size)/sizeof(int32_t));
9078
9079 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9080 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9081
9082 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9083 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9084
9085 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9086 &gCamCapability[cameraId]->max_frame_duration, 1);
9087
9088 camera_metadata_rational baseGainFactor = {
9089 gCamCapability[cameraId]->base_gain_factor.numerator,
9090 gCamCapability[cameraId]->base_gain_factor.denominator};
9091 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9092 &baseGainFactor, 1);
9093
9094 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9095 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9096
9097 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9098 gCamCapability[cameraId]->pixel_array_size.height};
9099 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9100 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9101
9102 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9103 gCamCapability[cameraId]->active_array_size.top,
9104 gCamCapability[cameraId]->active_array_size.width,
9105 gCamCapability[cameraId]->active_array_size.height};
9106 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9107 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9108
9109 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9110 &gCamCapability[cameraId]->white_level, 1);
9111
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009112 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9113 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9114 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009115 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009116 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009117
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009118#ifndef USE_HAL_3_3
9119 bool hasBlackRegions = false;
9120 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9121 LOGW("black_region_count: %d is bounded to %d",
9122 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9123 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9124 }
9125 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9126 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9127 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9128 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9129 }
9130 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9131 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9132 hasBlackRegions = true;
9133 }
9134#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009135 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9136 &gCamCapability[cameraId]->flash_charge_duration, 1);
9137
9138 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9139 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9140
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009141 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9142 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9143 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009144 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9145 &timestampSource, 1);
9146
Thierry Strudel54dc9782017-02-15 12:12:10 -08009147 //update histogram vendor data
9148 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009149 &gCamCapability[cameraId]->histogram_size, 1);
9150
Thierry Strudel54dc9782017-02-15 12:12:10 -08009151 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009152 &gCamCapability[cameraId]->max_histogram_count, 1);
9153
Shuzhen Wang14415f52016-11-16 18:26:18 -08009154 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9155    //so that the app can request fewer bins than the maximum supported.
9156 std::vector<int32_t> histBins;
9157 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9158 histBins.push_back(maxHistBins);
9159 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9160 (maxHistBins & 0x1) == 0) {
9161 histBins.push_back(maxHistBins >> 1);
9162 maxHistBins >>= 1;
9163 }
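    // Example (illustrative, assuming MIN_CAM_HISTOGRAM_STATS_SIZE is 32): if
    // max_histogram_count is 256, histBins becomes {256, 128, 64, 32}; halving
    // stops at the minimum supported size or at the first odd value.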
9164 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9165 histBins.data(), histBins.size());
9166
Thierry Strudel3d639192016-09-09 11:52:26 -07009167 int32_t sharpness_map_size[] = {
9168 gCamCapability[cameraId]->sharpness_map_size.width,
9169 gCamCapability[cameraId]->sharpness_map_size.height};
9170
9171 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9172 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9173
9174 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9175 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9176
Emilian Peev0f3c3162017-03-15 12:57:46 +00009177 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9178 if (0 <= indexPD) {
9179 // Advertise PD stats data as part of the Depth capabilities
9180 int32_t depthWidth =
9181 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9182 int32_t depthHeight =
9183 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9184 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9185 assert(0 < depthSamplesCount);
9186 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9187 &depthSamplesCount, 1);
9188
9189 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9190 depthHeight,
9191 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9192 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9193 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9194 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9195 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9196
9197 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9198 depthHeight, 33333333,
9199 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9200 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9201 depthMinDuration,
9202 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9203
9204 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9205 depthHeight, 0,
9206 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9207 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9208 depthStallDuration,
9209 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9210
9211 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9212 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9213 }
9214
Thierry Strudel3d639192016-09-09 11:52:26 -07009215 int32_t scalar_formats[] = {
9216 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9217 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9218 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9219 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9220 HAL_PIXEL_FORMAT_RAW10,
9221 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009222 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9223 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9224 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009225
9226 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9227 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9228 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9229 count, MAX_SIZES_CNT, available_processed_sizes);
9230 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9231 available_processed_sizes, count * 2);
9232
9233 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9234 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9235 makeTable(gCamCapability[cameraId]->raw_dim,
9236 count, MAX_SIZES_CNT, available_raw_sizes);
9237 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9238 available_raw_sizes, count * 2);
9239
9240 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9241 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9242 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9243 count, MAX_SIZES_CNT, available_fps_ranges);
9244 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9245 available_fps_ranges, count * 2);
9246
9247 camera_metadata_rational exposureCompensationStep = {
9248 gCamCapability[cameraId]->exp_compensation_step.numerator,
9249 gCamCapability[cameraId]->exp_compensation_step.denominator};
9250 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9251 &exposureCompensationStep, 1);
9252
9253 Vector<uint8_t> availableVstabModes;
9254 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9255 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009256 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009257 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009258 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009259 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009260 count = IS_TYPE_MAX;
9261 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9262 for (size_t i = 0; i < count; i++) {
9263 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9264 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9265 eisSupported = true;
9266 break;
9267 }
9268 }
9269 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009270 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9271 }
9272 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9273 availableVstabModes.array(), availableVstabModes.size());
9274
9275 /*HAL 1 and HAL 3 common*/
9276 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9277 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9278 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009279 // Cap the max zoom to the max preferred value
9280 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009281 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9282 &maxZoom, 1);
9283
9284 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9285 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9286
9287 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9288 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9289 max3aRegions[2] = 0; /* AF not supported */
9290 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9291 max3aRegions, 3);
9292
9293 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9294 memset(prop, 0, sizeof(prop));
9295 property_get("persist.camera.facedetect", prop, "1");
9296 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9297 LOGD("Support face detection mode: %d",
9298 supportedFaceDetectMode);
9299
9300 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009301 /* supported mode should be OFF if the max number of faces is 0 */
9302 if (maxFaces <= 0) {
9303 supportedFaceDetectMode = 0;
9304 }
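    // Map the property value to the advertised face-detect modes; OFF is always present,
    // and maxFaces is forced to 0 when neither SIMPLE nor FULL is advertised.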
Thierry Strudel3d639192016-09-09 11:52:26 -07009305 Vector<uint8_t> availableFaceDetectModes;
9306 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9307 if (supportedFaceDetectMode == 1) {
9308 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9309 } else if (supportedFaceDetectMode == 2) {
9310 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9311 } else if (supportedFaceDetectMode == 3) {
9312 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9313 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9314 } else {
9315 maxFaces = 0;
9316 }
9317 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9318 availableFaceDetectModes.array(),
9319 availableFaceDetectModes.size());
9320 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9321 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009322 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9323 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9324 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009325
9326 int32_t exposureCompensationRange[] = {
9327 gCamCapability[cameraId]->exposure_compensation_min,
9328 gCamCapability[cameraId]->exposure_compensation_max};
9329 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9330 exposureCompensationRange,
9331 sizeof(exposureCompensationRange)/sizeof(int32_t));
9332
9333 uint8_t lensFacing = (facingBack) ?
9334 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9335 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9336
9337 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9338 available_thumbnail_sizes,
9339 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9340
9341 /*all sizes will be clubbed into this tag*/
9342 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9343 /*android.scaler.availableStreamConfigurations*/
9344 Vector<int32_t> available_stream_configs;
9345 cam_dimension_t active_array_dim;
9346 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9347 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009348
9349 /*advertise list of input dimensions supported based on below property.
9350 By default all sizes upto 5MP will be advertised.
9351 Note that the setprop resolution format should be WxH.
9352 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9353 To list all supported sizes, setprop needs to be set with "0x0" */
9354 cam_dimension_t minInputSize = {2592,1944}; //5MP
9355 memset(prop, 0, sizeof(prop));
9356 property_get("persist.camera.input.minsize", prop, "2592x1944");
9357 if (strlen(prop) > 0) {
9358 char *saveptr = NULL;
9359 char *token = strtok_r(prop, "x", &saveptr);
9360 if (token != NULL) {
9361 minInputSize.width = atoi(token);
9362 }
9363 token = strtok_r(NULL, "x", &saveptr);
9364 if (token != NULL) {
9365 minInputSize.height = atoi(token);
9366 }
9367 }
9368
Thierry Strudel3d639192016-09-09 11:52:26 -07009369 /* Add input/output stream configurations for each scalar format */
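    // Each stream configuration entry is a (format, width, height, direction) tuple
    // appended by addStreamConfig().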
9370 for (size_t j = 0; j < scalar_formats_count; j++) {
9371 switch (scalar_formats[j]) {
9372 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9373 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9374 case HAL_PIXEL_FORMAT_RAW10:
9375 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9376 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9377 addStreamConfig(available_stream_configs, scalar_formats[j],
9378 gCamCapability[cameraId]->raw_dim[i],
9379 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9380 }
9381 break;
9382 case HAL_PIXEL_FORMAT_BLOB:
9383 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9384 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9385 addStreamConfig(available_stream_configs, scalar_formats[j],
9386 gCamCapability[cameraId]->picture_sizes_tbl[i],
9387 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9388 }
9389 break;
9390 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9391 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9392 default:
9393 cam_dimension_t largest_picture_size;
9394 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9395 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9396 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9397 addStreamConfig(available_stream_configs, scalar_formats[j],
9398 gCamCapability[cameraId]->picture_sizes_tbl[i],
9399 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009400 /* For the two formats below we also support input streams for reprocessing; advertise those */
9401 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9402 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9403 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9404 >= minInputSize.width) || (gCamCapability[cameraId]->
9405 picture_sizes_tbl[i].height >= minInputSize.height)) {
9406 addStreamConfig(available_stream_configs, scalar_formats[j],
9407 gCamCapability[cameraId]->picture_sizes_tbl[i],
9408 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9409 }
9410 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009411 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009412
Thierry Strudel3d639192016-09-09 11:52:26 -07009413 break;
9414 }
9415 }
9416
9417 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9418 available_stream_configs.array(), available_stream_configs.size());
9419 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9420 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9421
9422 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9423 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9424
9425 /* android.scaler.availableMinFrameDurations */
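    // Entries are (format, width, height, min_frame_duration_ns) tuples, matching the
    // stream configurations advertised above.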
9426 Vector<int64_t> available_min_durations;
9427 for (size_t j = 0; j < scalar_formats_count; j++) {
9428 switch (scalar_formats[j]) {
9429 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9430 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9431 case HAL_PIXEL_FORMAT_RAW10:
9432 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9433 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9434 available_min_durations.add(scalar_formats[j]);
9435 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9436 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9437 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9438 }
9439 break;
9440 default:
9441 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9442 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9443 available_min_durations.add(scalar_formats[j]);
9444 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9445 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9446 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9447 }
9448 break;
9449 }
9450 }
9451 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9452 available_min_durations.array(), available_min_durations.size());
9453
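    // Translate each backend HFR mode into an fps value; only modes at or above
    // MIN_FPS_FOR_BATCH_MODE are advertised as high-speed video configurations.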
9454 Vector<int32_t> available_hfr_configs;
9455 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9456 int32_t fps = 0;
9457 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9458 case CAM_HFR_MODE_60FPS:
9459 fps = 60;
9460 break;
9461 case CAM_HFR_MODE_90FPS:
9462 fps = 90;
9463 break;
9464 case CAM_HFR_MODE_120FPS:
9465 fps = 120;
9466 break;
9467 case CAM_HFR_MODE_150FPS:
9468 fps = 150;
9469 break;
9470 case CAM_HFR_MODE_180FPS:
9471 fps = 180;
9472 break;
9473 case CAM_HFR_MODE_210FPS:
9474 fps = 210;
9475 break;
9476 case CAM_HFR_MODE_240FPS:
9477 fps = 240;
9478 break;
9479 case CAM_HFR_MODE_480FPS:
9480 fps = 480;
9481 break;
9482 case CAM_HFR_MODE_OFF:
9483 case CAM_HFR_MODE_MAX:
9484 default:
9485 break;
9486 }
9487
9488 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9489 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
 9490 /* For each HFR frame rate, we need to advertise one variable fps range
 9491 * and one fixed fps range per dimension. E.g., for 120 FPS, advertise [30, 120]
 9492 * and [120, 120]. While camcorder preview alone is running, [30, 120] is
 9493 * set by the app. When video recording is started, [120, 120] is
 9494 * set. This way the sensor configuration does not change when recording
 9495 * is started */
9496
9497 /* (width, height, fps_min, fps_max, batch_size_max) */
9498 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9499 j < MAX_SIZES_CNT; j++) {
9500 available_hfr_configs.add(
9501 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9502 available_hfr_configs.add(
9503 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9504 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9505 available_hfr_configs.add(fps);
9506 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9507
9508 /* (width, height, fps_min, fps_max, batch_size_max) */
9509 available_hfr_configs.add(
9510 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9511 available_hfr_configs.add(
9512 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9513 available_hfr_configs.add(fps);
9514 available_hfr_configs.add(fps);
9515 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9516 }
9517 }
9518 }
9519 //Advertise HFR capability only if the property is set
9520 memset(prop, 0, sizeof(prop));
9521 property_get("persist.camera.hal3hfr.enable", prop, "1");
9522 uint8_t hfrEnable = (uint8_t)atoi(prop);
9523
 9524 if (hfrEnable && available_hfr_configs.array()) {
9525 staticInfo.update(
9526 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9527 available_hfr_configs.array(), available_hfr_configs.size());
9528 }
9529
9530 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9531 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9532 &max_jpeg_size, 1);
9533
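    // Effects, scene modes, antibanding, AF, and AWB modes below are translated from
    // backend enums with lookupFwkName(); entries without a framework mapping are skipped.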
9534 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9535 size_t size = 0;
9536 count = CAM_EFFECT_MODE_MAX;
9537 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9538 for (size_t i = 0; i < count; i++) {
9539 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9540 gCamCapability[cameraId]->supported_effects[i]);
9541 if (NAME_NOT_FOUND != val) {
9542 avail_effects[size] = (uint8_t)val;
9543 size++;
9544 }
9545 }
9546 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9547 avail_effects,
9548 size);
9549
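    // Remember the backend index of each advertised scene mode so the override list
    // below can be built in the same order.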
9550 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9551 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9552 size_t supported_scene_modes_cnt = 0;
9553 count = CAM_SCENE_MODE_MAX;
9554 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9555 for (size_t i = 0; i < count; i++) {
9556 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9557 CAM_SCENE_MODE_OFF) {
9558 int val = lookupFwkName(SCENE_MODES_MAP,
9559 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9560 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009561
Thierry Strudel3d639192016-09-09 11:52:26 -07009562 if (NAME_NOT_FOUND != val) {
9563 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9564 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9565 supported_scene_modes_cnt++;
9566 }
9567 }
9568 }
9569 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9570 avail_scene_modes,
9571 supported_scene_modes_cnt);
9572
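    // Each scene-mode override entry is an (AE, AWB, AF) triple; see makeOverridesList().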
9573 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9574 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9575 supported_scene_modes_cnt,
9576 CAM_SCENE_MODE_MAX,
9577 scene_mode_overrides,
9578 supported_indexes,
9579 cameraId);
9580
9581 if (supported_scene_modes_cnt == 0) {
9582 supported_scene_modes_cnt = 1;
9583 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9584 }
9585
9586 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9587 scene_mode_overrides, supported_scene_modes_cnt * 3);
9588
9589 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9590 ANDROID_CONTROL_MODE_AUTO,
9591 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9592 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9593 available_control_modes,
9594 3);
9595
9596 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9597 size = 0;
9598 count = CAM_ANTIBANDING_MODE_MAX;
9599 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9600 for (size_t i = 0; i < count; i++) {
9601 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9602 gCamCapability[cameraId]->supported_antibandings[i]);
9603 if (NAME_NOT_FOUND != val) {
9604 avail_antibanding_modes[size] = (uint8_t)val;
9605 size++;
9606 }
9607
9608 }
9609 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9610 avail_antibanding_modes,
9611 size);
9612
9613 uint8_t avail_abberation_modes[] = {
9614 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9615 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9616 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9617 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9618 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9619 if (0 == count) {
 9620 // If no aberration correction modes are available for a device, advertise only the OFF mode
9621 size = 1;
9622 } else {
 9623 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
 9624 // so advertise all 3 modes if at least one mode is supported, as per the
 9625 // Android M requirement
9626 size = 3;
9627 }
9628 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9629 avail_abberation_modes,
9630 size);
9631
9632 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9633 size = 0;
9634 count = CAM_FOCUS_MODE_MAX;
9635 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9636 for (size_t i = 0; i < count; i++) {
9637 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9638 gCamCapability[cameraId]->supported_focus_modes[i]);
9639 if (NAME_NOT_FOUND != val) {
9640 avail_af_modes[size] = (uint8_t)val;
9641 size++;
9642 }
9643 }
9644 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9645 avail_af_modes,
9646 size);
9647
9648 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9649 size = 0;
9650 count = CAM_WB_MODE_MAX;
9651 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9652 for (size_t i = 0; i < count; i++) {
9653 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9654 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9655 gCamCapability[cameraId]->supported_white_balances[i]);
9656 if (NAME_NOT_FOUND != val) {
9657 avail_awb_modes[size] = (uint8_t)val;
9658 size++;
9659 }
9660 }
9661 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9662 avail_awb_modes,
9663 size);
9664
9665 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9666 count = CAM_FLASH_FIRING_LEVEL_MAX;
9667 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9668 count);
9669 for (size_t i = 0; i < count; i++) {
9670 available_flash_levels[i] =
9671 gCamCapability[cameraId]->supported_firing_levels[i];
9672 }
9673 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9674 available_flash_levels, count);
9675
9676 uint8_t flashAvailable;
9677 if (gCamCapability[cameraId]->flash_available)
9678 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9679 else
9680 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9681 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9682 &flashAvailable, 1);
9683
9684 Vector<uint8_t> avail_ae_modes;
9685 count = CAM_AE_MODE_MAX;
9686 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9687 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009688 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9689 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9690 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9691 }
9692 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009693 }
9694 if (flashAvailable) {
9695 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9696 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9697 }
9698 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9699 avail_ae_modes.array(),
9700 avail_ae_modes.size());
9701
9702 int32_t sensitivity_range[2];
9703 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9704 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9705 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9706 sensitivity_range,
9707 sizeof(sensitivity_range) / sizeof(int32_t));
9708
9709 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9710 &gCamCapability[cameraId]->max_analog_sensitivity,
9711 1);
9712
9713 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9714 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9715 &sensor_orientation,
9716 1);
9717
9718 int32_t max_output_streams[] = {
9719 MAX_STALLING_STREAMS,
9720 MAX_PROCESSED_STREAMS,
9721 MAX_RAW_STREAMS};
9722 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9723 max_output_streams,
9724 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9725
9726 uint8_t avail_leds = 0;
9727 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9728 &avail_leds, 0);
9729
9730 uint8_t focus_dist_calibrated;
9731 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9732 gCamCapability[cameraId]->focus_dist_calibrated);
9733 if (NAME_NOT_FOUND != val) {
9734 focus_dist_calibrated = (uint8_t)val;
9735 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9736 &focus_dist_calibrated, 1);
9737 }
9738
9739 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9740 size = 0;
9741 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9742 MAX_TEST_PATTERN_CNT);
9743 for (size_t i = 0; i < count; i++) {
9744 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9745 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9746 if (NAME_NOT_FOUND != testpatternMode) {
9747 avail_testpattern_modes[size] = testpatternMode;
9748 size++;
9749 }
9750 }
9751 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9752 avail_testpattern_modes,
9753 size);
9754
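    // Advertised pipeline depth covers in-flight requests plus the empty-pipeline and
    // frame-skip delays.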
9755 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9756 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9757 &max_pipeline_depth,
9758 1);
9759
9760 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9761 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9762 &partial_result_count,
9763 1);
9764
9765 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9766 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9767
9768 Vector<uint8_t> available_capabilities;
9769 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9770 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9771 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9772 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9773 if (supportBurst) {
9774 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9775 }
9776 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9777 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9778 if (hfrEnable && available_hfr_configs.array()) {
9779 available_capabilities.add(
9780 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9781 }
9782
9783 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9784 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9785 }
9786 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9787 available_capabilities.array(),
9788 available_capabilities.size());
9789
 9790 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
 9791 //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9792 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9793 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9794
9795 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9796 &aeLockAvailable, 1);
9797
 9798 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
 9799 //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9800 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9801 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9802
9803 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9804 &awbLockAvailable, 1);
9805
9806 int32_t max_input_streams = 1;
9807 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9808 &max_input_streams,
9809 1);
9810
9811 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9812 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9813 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9814 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9815 HAL_PIXEL_FORMAT_YCbCr_420_888};
9816 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9817 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9818
9819 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9820 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9821 &max_latency,
9822 1);
9823
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009824#ifndef USE_HAL_3_3
9825 int32_t isp_sensitivity_range[2];
9826 isp_sensitivity_range[0] =
9827 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9828 isp_sensitivity_range[1] =
9829 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9830 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9831 isp_sensitivity_range,
9832 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9833#endif
9834
Thierry Strudel3d639192016-09-09 11:52:26 -07009835 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9836 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9837 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9838 available_hot_pixel_modes,
9839 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9840
9841 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9842 ANDROID_SHADING_MODE_FAST,
9843 ANDROID_SHADING_MODE_HIGH_QUALITY};
9844 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9845 available_shading_modes,
9846 3);
9847
9848 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9849 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9850 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9851 available_lens_shading_map_modes,
9852 2);
9853
9854 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9855 ANDROID_EDGE_MODE_FAST,
9856 ANDROID_EDGE_MODE_HIGH_QUALITY,
9857 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9858 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9859 available_edge_modes,
9860 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9861
9862 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9863 ANDROID_NOISE_REDUCTION_MODE_FAST,
9864 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9865 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9866 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9867 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9868 available_noise_red_modes,
9869 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9870
9871 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9872 ANDROID_TONEMAP_MODE_FAST,
9873 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9874 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9875 available_tonemap_modes,
9876 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9877
9878 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9879 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9880 available_hot_pixel_map_modes,
9881 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9882
9883 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9884 gCamCapability[cameraId]->reference_illuminant1);
9885 if (NAME_NOT_FOUND != val) {
9886 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9887 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9888 }
9889
9890 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9891 gCamCapability[cameraId]->reference_illuminant2);
9892 if (NAME_NOT_FOUND != val) {
9893 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9894 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9895 }
9896
9897 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9898 (void *)gCamCapability[cameraId]->forward_matrix1,
9899 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9900
9901 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9902 (void *)gCamCapability[cameraId]->forward_matrix2,
9903 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9904
9905 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9906 (void *)gCamCapability[cameraId]->color_transform1,
9907 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9908
9909 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9910 (void *)gCamCapability[cameraId]->color_transform2,
9911 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9912
9913 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9914 (void *)gCamCapability[cameraId]->calibration_transform1,
9915 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9916
9917 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9918 (void *)gCamCapability[cameraId]->calibration_transform2,
9919 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9920
9921 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9922 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9923 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9924 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9925 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9926 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9927 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9928 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9929 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9930 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9931 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9932 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9933 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9934 ANDROID_JPEG_GPS_COORDINATES,
9935 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9936 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9937 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9938 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9939 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9940 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9941 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9942 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9943 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9944 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009945#ifndef USE_HAL_3_3
9946 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9947#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009948 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009949 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009950 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9951 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009952 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009953 /* DevCamDebug metadata request_keys_basic */
9954 DEVCAMDEBUG_META_ENABLE,
9955 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009956 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009957 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9958 NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV
Samuel Ha68ba5172016-12-15 18:41:12 -08009959 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009960
9961 size_t request_keys_cnt =
9962 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9963 Vector<int32_t> available_request_keys;
9964 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9965 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9966 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9967 }
9968
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009969 if (gExposeEnableZslKey) {
9970 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9971 }
9972
Thierry Strudel3d639192016-09-09 11:52:26 -07009973 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9974 available_request_keys.array(), available_request_keys.size());
9975
9976 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9977 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9978 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9979 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9980 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9981 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9982 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9983 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9984 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9985 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9986 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9987 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9988 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9989 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9990 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9991 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9992 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009993 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009994 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9995 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9996 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009997 ANDROID_STATISTICS_FACE_SCORES,
9998#ifndef USE_HAL_3_3
9999 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10000#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010001 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010002 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010003 // DevCamDebug metadata result_keys_basic
10004 DEVCAMDEBUG_META_ENABLE,
10005 // DevCamDebug metadata result_keys AF
10006 DEVCAMDEBUG_AF_LENS_POSITION,
10007 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10008 DEVCAMDEBUG_AF_TOF_DISTANCE,
10009 DEVCAMDEBUG_AF_LUMA,
10010 DEVCAMDEBUG_AF_HAF_STATE,
10011 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10012 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10013 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10014 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10015 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10016 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10017 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10018 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10019 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10020 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10021 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10022 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10023 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10024 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10025 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10026 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10027 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10028 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10029 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10030 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10031 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10032 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10033 // DevCamDebug metadata result_keys AEC
10034 DEVCAMDEBUG_AEC_TARGET_LUMA,
10035 DEVCAMDEBUG_AEC_COMP_LUMA,
10036 DEVCAMDEBUG_AEC_AVG_LUMA,
10037 DEVCAMDEBUG_AEC_CUR_LUMA,
10038 DEVCAMDEBUG_AEC_LINECOUNT,
10039 DEVCAMDEBUG_AEC_REAL_GAIN,
10040 DEVCAMDEBUG_AEC_EXP_INDEX,
10041 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010042 // DevCamDebug metadata result_keys zzHDR
10043 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10044 DEVCAMDEBUG_AEC_L_LINECOUNT,
10045 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10046 DEVCAMDEBUG_AEC_S_LINECOUNT,
10047 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10048 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10049 // DevCamDebug metadata result_keys ADRC
10050 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10051 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10052 DEVCAMDEBUG_AEC_GTM_RATIO,
10053 DEVCAMDEBUG_AEC_LTM_RATIO,
10054 DEVCAMDEBUG_AEC_LA_RATIO,
10055 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010056 // DevCamDebug metadata result_keys AWB
10057 DEVCAMDEBUG_AWB_R_GAIN,
10058 DEVCAMDEBUG_AWB_G_GAIN,
10059 DEVCAMDEBUG_AWB_B_GAIN,
10060 DEVCAMDEBUG_AWB_CCT,
10061 DEVCAMDEBUG_AWB_DECISION,
10062 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010063 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10064 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10065 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010066 };
10067
Thierry Strudel3d639192016-09-09 11:52:26 -070010068 size_t result_keys_cnt =
10069 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10070
10071 Vector<int32_t> available_result_keys;
10072 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10073 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10074 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10075 }
10076 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10077 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10078 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10079 }
10080 if (supportedFaceDetectMode == 1) {
10081 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10082 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10083 } else if ((supportedFaceDetectMode == 2) ||
10084 (supportedFaceDetectMode == 3)) {
10085 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10086 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10087 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010088#ifndef USE_HAL_3_3
10089 if (hasBlackRegions) {
10090 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10091 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10092 }
10093#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010094
10095 if (gExposeEnableZslKey) {
10096 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10097 }
10098
Thierry Strudel3d639192016-09-09 11:52:26 -070010099 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10100 available_result_keys.array(), available_result_keys.size());
10101
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010102 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010103 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10104 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10105 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10106 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10107 ANDROID_SCALER_CROPPING_TYPE,
10108 ANDROID_SYNC_MAX_LATENCY,
10109 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10110 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10111 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10112 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10113 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10114 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10115 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10116 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10117 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10118 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10119 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10120 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10121 ANDROID_LENS_FACING,
10122 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10123 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10124 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10125 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10126 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10127 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10128 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10129 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10130 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10131 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10132 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10133 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10134 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10135 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10136 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10137 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10138 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10139 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10140 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10141 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010142 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010143 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10144 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10145 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10146 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10147 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10148 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10149 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10150 ANDROID_CONTROL_AVAILABLE_MODES,
10151 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10152 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10153 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10154 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010155 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10156#ifndef USE_HAL_3_3
10157 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10158 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10159#endif
10160 };
10161
10162 Vector<int32_t> available_characteristics_keys;
10163 available_characteristics_keys.appendArray(characteristics_keys_basic,
10164 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10165#ifndef USE_HAL_3_3
10166 if (hasBlackRegions) {
10167 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10168 }
10169#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010170
10171 if (0 <= indexPD) {
10172 int32_t depthKeys[] = {
10173 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10174 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10175 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10176 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10177 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10178 };
10179 available_characteristics_keys.appendArray(depthKeys,
10180 sizeof(depthKeys) / sizeof(depthKeys[0]));
10181 }
10182
Thierry Strudel3d639192016-09-09 11:52:26 -070010183 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010184 available_characteristics_keys.array(),
10185 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010186
 10187 /* Available stall durations depend on the HW + SW and will differ across devices */
 10188 /* Entries for RAW have to be added after implementation */
10189 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10190 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10191
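    // Stall duration entries are (format, width, height, stall_ns) tuples: JPEG uses the
    // per-size jpeg_stall_durations table, RAW16 uses raw16_stall_durations.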
10192 Vector<int64_t> available_stall_durations;
10193 for (uint32_t j = 0; j < stall_formats_count; j++) {
10194 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10195 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10196 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10197 available_stall_durations.add(stall_formats[j]);
10198 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10199 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10200 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10201 }
10202 } else {
10203 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10204 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10205 available_stall_durations.add(stall_formats[j]);
10206 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10207 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10208 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10209 }
10210 }
10211 }
10212 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10213 available_stall_durations.array(),
10214 available_stall_durations.size());
10215
10216 //QCAMERA3_OPAQUE_RAW
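    // Select the opaque RAW format from the backend packing (legacy QCOM vs. MIPI) and
    // the sensor white level (8/10/12-bit).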
10217 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10218 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10219 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10220 case LEGACY_RAW:
10221 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10222 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10223 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10224 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10225 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10226 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10227 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10228 break;
10229 case MIPI_RAW:
10230 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10231 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10232 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10233 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10234 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10235 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10236 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10237 break;
10238 default:
10239 LOGE("unknown opaque_raw_format %d",
10240 gCamCapability[cameraId]->opaque_raw_fmt);
10241 break;
10242 }
10243 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10244
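    // QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) triples computed
    // from the RAW plane layout for each supported RAW dimension.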
10245 Vector<int32_t> strides;
10246 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10247 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10248 cam_stream_buf_plane_info_t buf_planes;
10249 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10250 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10251 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10252 &gCamCapability[cameraId]->padding_info, &buf_planes);
10253 strides.add(buf_planes.plane_info.mp[0].stride);
10254 }
10255 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10256 strides.size());
10257
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010258 //TBD: remove the following line once backend advertises zzHDR in feature mask
10259 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010260 //Video HDR default
10261 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10262 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010263 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010264 int32_t vhdr_mode[] = {
10265 QCAMERA3_VIDEO_HDR_MODE_OFF,
10266 QCAMERA3_VIDEO_HDR_MODE_ON};
10267
10268 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10269 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10270 vhdr_mode, vhdr_mode_count);
10271 }
10272
Thierry Strudel3d639192016-09-09 11:52:26 -070010273 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10274 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10275 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10276
10277 uint8_t isMonoOnly =
10278 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10279 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10280 &isMonoOnly, 1);
10281
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010282#ifndef USE_HAL_3_3
10283 Vector<int32_t> opaque_size;
10284 for (size_t j = 0; j < scalar_formats_count; j++) {
10285 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10286 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10287 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10288 cam_stream_buf_plane_info_t buf_planes;
10289
10290 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10291 &gCamCapability[cameraId]->padding_info, &buf_planes);
10292
10293 if (rc == 0) {
10294 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10295 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10296 opaque_size.add(buf_planes.plane_info.frame_len);
 10297 } else {
10298 LOGE("raw frame calculation failed!");
10299 }
10300 }
10301 }
10302 }
10303
10304 if ((opaque_size.size() > 0) &&
10305 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10306 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10307 else
10308 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10309#endif
10310
Thierry Strudel04e026f2016-10-10 11:27:36 -070010311 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10312 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10313 size = 0;
10314 count = CAM_IR_MODE_MAX;
10315 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10316 for (size_t i = 0; i < count; i++) {
10317 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10318 gCamCapability[cameraId]->supported_ir_modes[i]);
10319 if (NAME_NOT_FOUND != val) {
10320 avail_ir_modes[size] = (int32_t)val;
10321 size++;
10322 }
10323 }
10324 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10325 avail_ir_modes, size);
10326 }
10327
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010328 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10329 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10330 size = 0;
10331 count = CAM_AEC_CONVERGENCE_MAX;
10332 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10333 for (size_t i = 0; i < count; i++) {
10334 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10335 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10336 if (NAME_NOT_FOUND != val) {
10337 available_instant_aec_modes[size] = (int32_t)val;
10338 size++;
10339 }
10340 }
10341 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10342 available_instant_aec_modes, size);
10343 }
10344
Thierry Strudel54dc9782017-02-15 12:12:10 -080010345 int32_t sharpness_range[] = {
10346 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10347 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10348 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10349
10350 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10351 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10352 size = 0;
10353 count = CAM_BINNING_CORRECTION_MODE_MAX;
10354 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10355 for (size_t i = 0; i < count; i++) {
10356 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10357 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10358 gCamCapability[cameraId]->supported_binning_modes[i]);
10359 if (NAME_NOT_FOUND != val) {
10360 avail_binning_modes[size] = (int32_t)val;
10361 size++;
10362 }
10363 }
10364 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10365 avail_binning_modes, size);
10366 }
10367
10368 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10369 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10370 size = 0;
10371 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10372 for (size_t i = 0; i < count; i++) {
10373 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10374 gCamCapability[cameraId]->supported_aec_modes[i]);
10375 if (NAME_NOT_FOUND != val)
10376 available_aec_modes[size++] = val;
10377 }
10378 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10379 available_aec_modes, size);
10380 }
10381
10382 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10383 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10384 size = 0;
10385 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10386 for (size_t i = 0; i < count; i++) {
10387 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10388 gCamCapability[cameraId]->supported_iso_modes[i]);
10389 if (NAME_NOT_FOUND != val)
10390 available_iso_modes[size++] = val;
10391 }
10392 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10393 available_iso_modes, size);
10394 }
10395
10396 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
 10397 // Bound by the destination array size; 'count' still holds the ISO-mode count here.
 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
10398 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10399 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10400 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10401
10402 int32_t available_saturation_range[4];
10403 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10404 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10405 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10406 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10407 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10408 available_saturation_range, 4);
10409
10410 uint8_t is_hdr_values[2];
10411 is_hdr_values[0] = 0;
10412 is_hdr_values[1] = 1;
10413 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10414 is_hdr_values, 2);
10415
10416 float is_hdr_confidence_range[2];
10417 is_hdr_confidence_range[0] = 0.0;
10418 is_hdr_confidence_range[1] = 1.0;
10419 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10420 is_hdr_confidence_range, 2);
10421
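    // Publish the EEPROM version string, appending an Easel presence marker (",E:Y" or
    // ",E:N") when it fits within MAX_EEPROM_VERSION_INFO_LEN.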
Emilian Peev0a972ef2017-03-16 10:25:53 +000010422 size_t eepromLength = strnlen(
10423 reinterpret_cast<const char *>(
10424 gCamCapability[cameraId]->eeprom_version_info),
10425 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10426 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010427 char easelInfo[] = ",E:N";
10428 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10429 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10430 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010431 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10432 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010433 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010434 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10435 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10436 }
10437
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 gStaticMetadata[cameraId] = staticInfo.release();
10439 return rc;
10440}
10441
10442/*===========================================================================
10443 * FUNCTION : makeTable
10444 *
10445 * DESCRIPTION: make a table of sizes
10446 *
10447 * PARAMETERS :
10448 *
10449 *
10450 *==========================================================================*/
10451void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10452 size_t max_size, int32_t *sizeTable)
10453{
10454 size_t j = 0;
10455 if (size > max_size) {
10456 size = max_size;
10457 }
10458 for (size_t i = 0; i < size; i++) {
10459 sizeTable[j] = dimTable[i].width;
10460 sizeTable[j+1] = dimTable[i].height;
10461 j+=2;
10462 }
10463}
10464
10465/*===========================================================================
10466 * FUNCTION : makeFPSTable
10467 *
10468 * DESCRIPTION: make a table of fps ranges
10469 *
10470 * PARAMETERS :
10471 *
10472 *==========================================================================*/
10473void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10474 size_t max_size, int32_t *fpsRangesTable)
10475{
10476 size_t j = 0;
10477 if (size > max_size) {
10478 size = max_size;
10479 }
10480 for (size_t i = 0; i < size; i++) {
10481 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10482 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10483 j+=2;
10484 }
10485}
10486
10487/*===========================================================================
10488 * FUNCTION : makeOverridesList
10489 *
10490 * DESCRIPTION: make a list of scene mode overrides
10491 *
10492 * PARAMETERS :
10493 *
10494 *
10495 *==========================================================================*/
10496void QCamera3HardwareInterface::makeOverridesList(
10497 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10498 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10499{
10500 /* The daemon will give a list of overrides for all scene modes.
10501 However, we should send the fwk only the overrides for the scene modes
10502 supported by the framework */
10503 size_t j = 0;
10504 if (size > max_size) {
10505 size = max_size;
10506 }
10507 size_t focus_count = CAM_FOCUS_MODE_MAX;
10508 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10509 focus_count);
10510 for (size_t i = 0; i < size; i++) {
10511 bool supt = false;
10512 size_t index = supported_indexes[i];
10513 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10514 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10515 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10516 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10517 overridesTable[index].awb_mode);
10518 if (NAME_NOT_FOUND != val) {
10519 overridesList[j+1] = (uint8_t)val;
10520 }
10521 uint8_t focus_override = overridesTable[index].af_mode;
10522 for (size_t k = 0; k < focus_count; k++) {
10523 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10524 supt = true;
10525 break;
10526 }
10527 }
10528 if (supt) {
10529 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10530 focus_override);
10531 if (NAME_NOT_FOUND != val) {
10532 overridesList[j+2] = (uint8_t)val;
10533 }
10534 } else {
10535 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10536 }
10537 j+=3;
10538 }
10539}
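/*
 * Illustrative sketch (values are examples only): the list built above is laid
 * out as one {AE, AWB, AF} triplet per supported scene mode, which is the
 * layout ANDROID_CONTROL_SCENE_MODE_OVERRIDES expects, e.g. for two modes:
 *
 *     // overridesList = { AE_MODE_ON_AUTO_FLASH, AWB_MODE_AUTO, AF_MODE_CONTINUOUS_PICTURE,
 *     //                   AE_MODE_ON_AUTO_FLASH, AWB_MODE_AUTO, AF_MODE_OFF };
 */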
10540
10541/*===========================================================================
10542 * FUNCTION : filterJpegSizes
10543 *
10544 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
10545 * could be downscaled to
10546 *
10547 * PARAMETERS :
10548 *
10549 * RETURN : length of jpegSizes array
10550 *==========================================================================*/
10551
10552size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10553 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10554 uint8_t downscale_factor)
10555{
10556 if (0 == downscale_factor) {
10557 downscale_factor = 1;
10558 }
10559
10560 int32_t min_width = active_array_size.width / downscale_factor;
10561 int32_t min_height = active_array_size.height / downscale_factor;
10562 size_t jpegSizesCnt = 0;
10563 if (processedSizesCnt > maxCount) {
10564 processedSizesCnt = maxCount;
10565 }
10566 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10567 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10568 jpegSizes[jpegSizesCnt] = processedSizes[i];
10569 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10570 jpegSizesCnt += 2;
10571 }
10572 }
10573 return jpegSizesCnt;
10574}
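/*
 * Illustrative sketch (example numbers are assumptions): with a 4032x3024
 * active array and downscale_factor = 2, only processed sizes of at least
 * 2016x1512 survive as JPEG sizes.
 *
 *     int32_t processed[6] = {4032, 3024, 3840, 2160, 1920, 1080};
 *     int32_t jpeg[6];
 *     cam_rect_t active = {0, 0, 4032, 3024};
 *     size_t cnt = filterJpegSizes(jpeg, processed, 6, 6, active, 2);
 *     // cnt == 4; jpeg holds {4032, 3024, 3840, 2160} (1920x1080 is dropped)
 */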
10575
10576/*===========================================================================
10577 * FUNCTION : computeNoiseModelEntryS
10578 *
10579 * DESCRIPTION: function to map a given sensitivity to the S noise
10580 * model parameters in the DNG noise model.
10581 *
10582 * PARAMETERS : sens : the sensor sensitivity
10583 *
10584 * RETURN : S (sensor amplification) noise
10585 *
10586 *==========================================================================*/
10587double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10588 double s = gCamCapability[mCameraId]->gradient_S * sens +
10589 gCamCapability[mCameraId]->offset_S;
10590 return ((s < 0.0) ? 0.0 : s);
10591}
10592
10593/*===========================================================================
10594 * FUNCTION : computeNoiseModelEntryO
10595 *
10596 * DESCRIPTION: function to map a given sensitivity to the O noise
10597 * model parameters in the DNG noise model.
10598 *
10599 * PARAMETERS : sens : the sensor sensitivity
10600 *
10601 * RETURN : O (sensor readout) noise
10602 *
10603 *==========================================================================*/
10604double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10605 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10606 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10607 1.0 : (1.0 * sens / max_analog_sens);
10608 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10609 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10610 return ((o < 0.0) ? 0.0 : o);
10611}
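/*
 * Illustrative sketch: the two helpers above provide the S (signal-dependent)
 * and O (signal-independent) coefficients of the DNG noise profile, where the
 * modeled noise variance at normalized signal level x is N(x) = S * x + O.
 * They are typically emitted as one {S, O} pair per Bayer channel (assumption:
 * the usage shown here is illustrative only):
 *
 *     double noise_profile[2 * 4];  // {S, O} per CFA channel
 *     for (int ch = 0; ch < 4; ch++) {
 *         noise_profile[2 * ch]     = computeNoiseModelEntryS(sensitivity);
 *         noise_profile[2 * ch + 1] = computeNoiseModelEntryO(sensitivity);
 *     }
 */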
10612
10613/*===========================================================================
10614 * FUNCTION : getSensorSensitivity
10615 *
10616 * DESCRIPTION: convert iso_mode to an integer value
10617 *
10618 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10619 *
10620 * RETURN : sensitivity supported by sensor
10621 *
10622 *==========================================================================*/
10623int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10624{
10625 int32_t sensitivity;
10626
10627 switch (iso_mode) {
10628 case CAM_ISO_MODE_100:
10629 sensitivity = 100;
10630 break;
10631 case CAM_ISO_MODE_200:
10632 sensitivity = 200;
10633 break;
10634 case CAM_ISO_MODE_400:
10635 sensitivity = 400;
10636 break;
10637 case CAM_ISO_MODE_800:
10638 sensitivity = 800;
10639 break;
10640 case CAM_ISO_MODE_1600:
10641 sensitivity = 1600;
10642 break;
10643 default:
10644 sensitivity = -1;
10645 break;
10646 }
10647 return sensitivity;
10648}
10649
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010650int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010651 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010652 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10653 // to connect to Easel.
10654 bool doNotpowerOnEasel =
10655 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10656
10657 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010658 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10659 return OK;
10660 }
10661
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010662 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010663 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010664 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010665 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010666 return res;
10667 }
10668
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010669 EaselManagerClientOpened = true;
10670
10671 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010672 if (res != OK) {
10673 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10674 }
10675
10676 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010677 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010678
10679 // Expose enableZsl key only when HDR+ mode is enabled.
10680 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010681 }
10682
10683 return OK;
10684}
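/*
 * Summary sketch of the property gating above (property names are taken from
 * the code; no additional behavior is implied):
 *
 *     camera.hdrplus.donotpoweroneasel = true  -> Easel left untouched (HDR+ test hook)
 *     persist.camera.hdrplus.enable    = true  -> full HDR+ (gEaselBypassOnly = false,
 *                                                 ANDROID_CONTROL_ENABLE_ZSL exposed)
 *     persist.camera.hdrplus.profiling = true  -> Easel profiling enabled
 */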
10685
Thierry Strudel3d639192016-09-09 11:52:26 -070010686/*===========================================================================
10687 * FUNCTION : getCamInfo
10688 *
10689 * DESCRIPTION: query camera capabilities
10690 *
10691 * PARAMETERS :
10692 * @cameraId : camera Id
10693 * @info : camera info struct to be filled in with camera capabilities
10694 *
10695 * RETURN : int type of status
10696 * NO_ERROR -- success
10697 * non-zero failure code
10698 *==========================================================================*/
10699int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10700 struct camera_info *info)
10701{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010702 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010703 int rc = 0;
10704
10705 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010706
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010707 {
10708 Mutex::Autolock l(gHdrPlusClientLock);
10709 rc = initHdrPlusClientLocked();
10710 if (rc != OK) {
10711 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10712 pthread_mutex_unlock(&gCamLock);
10713 return rc;
10714 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010715 }
10716
Thierry Strudel3d639192016-09-09 11:52:26 -070010717 if (NULL == gCamCapability[cameraId]) {
10718 rc = initCapabilities(cameraId);
10719 if (rc < 0) {
10720 pthread_mutex_unlock(&gCamLock);
10721 return rc;
10722 }
10723 }
10724
10725 if (NULL == gStaticMetadata[cameraId]) {
10726 rc = initStaticMetadata(cameraId);
10727 if (rc < 0) {
10728 pthread_mutex_unlock(&gCamLock);
10729 return rc;
10730 }
10731 }
10732
10733 switch(gCamCapability[cameraId]->position) {
10734 case CAM_POSITION_BACK:
10735 case CAM_POSITION_BACK_AUX:
10736 info->facing = CAMERA_FACING_BACK;
10737 break;
10738
10739 case CAM_POSITION_FRONT:
10740 case CAM_POSITION_FRONT_AUX:
10741 info->facing = CAMERA_FACING_FRONT;
10742 break;
10743
10744 default:
10745 LOGE("Unknown position type %d for camera id:%d",
10746 gCamCapability[cameraId]->position, cameraId);
10747 rc = -1;
10748 break;
10749 }
10750
10751
10752 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010753#ifndef USE_HAL_3_3
10754 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10755#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010756 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010757#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010758 info->static_camera_characteristics = gStaticMetadata[cameraId];
10759
10760 //For now assume both cameras can operate independently.
10761 info->conflicting_devices = NULL;
10762 info->conflicting_devices_length = 0;
10763
10764 //resource cost is 100 * MIN(1.0, m/M),
10765 //where m is throughput requirement with maximum stream configuration
10766 //and M is CPP maximum throughput.
10767 float max_fps = 0.0;
10768 for (uint32_t i = 0;
10769 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10770 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10771 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10772 }
10773 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10774 gCamCapability[cameraId]->active_array_size.width *
10775 gCamCapability[cameraId]->active_array_size.height * max_fps /
10776 gCamCapability[cameraId]->max_pixel_bandwidth;
10777 info->resource_cost = 100 * MIN(1.0, ratio);
10778 LOGI("camera %d resource cost is %d", cameraId,
10779 info->resource_cost);
10780
10781 pthread_mutex_unlock(&gCamLock);
10782 return rc;
10783}
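/*
 * Worked example of the resource cost formula above (all numbers are
 * assumptions for illustration): with a 4032x3024 active array, max_fps = 30,
 * MAX_PROCESSED_STREAMS = 3 and max_pixel_bandwidth = 2e9 pixels/s:
 *
 *     ratio = 3 * 4032 * 3024 * 30 / 2e9 ~= 0.55
 *     resource_cost = 100 * MIN(1.0, 0.55) = 55
 */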
10784
10785/*===========================================================================
10786 * FUNCTION : translateCapabilityToMetadata
10787 *
10788 * DESCRIPTION: translate the capability into camera_metadata_t
10789 *
10790 * PARAMETERS : type of the request
10791 *
10792 *
10793 * RETURN : success: camera_metadata_t*
10794 * failure: NULL
10795 *
10796 *==========================================================================*/
10797camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10798{
10799 if (mDefaultMetadata[type] != NULL) {
10800 return mDefaultMetadata[type];
10801 }
10802 //first time we are handling this request
10803 //fill up the metadata structure using the wrapper class
10804 CameraMetadata settings;
10805 //translate from cam_capability_t to camera_metadata_tag_t
10806 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10807 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10808 int32_t defaultRequestID = 0;
10809 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10810
10811 /* OIS disable */
10812 char ois_prop[PROPERTY_VALUE_MAX];
10813 memset(ois_prop, 0, sizeof(ois_prop));
10814 property_get("persist.camera.ois.disable", ois_prop, "0");
10815 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10816
10817 /* Force video to use OIS */
10818 char videoOisProp[PROPERTY_VALUE_MAX];
10819 memset(videoOisProp, 0, sizeof(videoOisProp));
10820 property_get("persist.camera.ois.video", videoOisProp, "1");
10821 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010822
10823 // Hybrid AE enable/disable
10824 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10825 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10826 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10827 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10828
Thierry Strudel3d639192016-09-09 11:52:26 -070010829 uint8_t controlIntent = 0;
10830 uint8_t focusMode;
10831 uint8_t vsMode;
10832 uint8_t optStabMode;
10833 uint8_t cacMode;
10834 uint8_t edge_mode;
10835 uint8_t noise_red_mode;
10836 uint8_t tonemap_mode;
10837 bool highQualityModeEntryAvailable = FALSE;
10838 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010839 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010840 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10841 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010842 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010843 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010844 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010845
Thierry Strudel3d639192016-09-09 11:52:26 -070010846 switch (type) {
10847 case CAMERA3_TEMPLATE_PREVIEW:
10848 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10849 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10850 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10851 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10852 edge_mode = ANDROID_EDGE_MODE_FAST;
10853 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10854 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10855 break;
10856 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10857 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10858 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10859 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10860 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10861 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10862 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10863 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10864 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10865 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10866 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10867 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10868 highQualityModeEntryAvailable = TRUE;
10869 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10870 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10871 fastModeEntryAvailable = TRUE;
10872 }
10873 }
10874 if (highQualityModeEntryAvailable) {
10875 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10876 } else if (fastModeEntryAvailable) {
10877 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10878 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010879 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10880 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10881 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010882 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010883 break;
10884 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10885 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10886 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10887 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010888 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10889 edge_mode = ANDROID_EDGE_MODE_FAST;
10890 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10891 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10892 if (forceVideoOis)
10893 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10894 break;
10895 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10896 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10897 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10898 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010899 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10900 edge_mode = ANDROID_EDGE_MODE_FAST;
10901 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10902 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10903 if (forceVideoOis)
10904 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10905 break;
10906 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10907 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10908 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10909 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10910 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10911 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10912 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10913 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10914 break;
10915 case CAMERA3_TEMPLATE_MANUAL:
10916 edge_mode = ANDROID_EDGE_MODE_FAST;
10917 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10918 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10919 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10920 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10921 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10922 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10923 break;
10924 default:
10925 edge_mode = ANDROID_EDGE_MODE_FAST;
10926 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10927 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10928 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10929 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10930 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10931 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10932 break;
10933 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010934 // Set CAC to OFF if underlying device doesn't support
10935 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10936 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10937 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010938 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10939 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10940 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10941 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10942 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10943 }
10944 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010945 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010946 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010947
10948 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10949 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10950 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10951 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10952 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10953 || ois_disable)
10954 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10955 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010956 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010957
10958 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10959 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10960
10961 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10962 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10963
10964 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10965 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10966
10967 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10968 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10969
10970 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10971 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10972
10973 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10974 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10975
10976 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10977 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10978
10979 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10980 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10981
10982 /*flash*/
10983 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10984 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10985
10986 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10987 settings.update(ANDROID_FLASH_FIRING_POWER,
10988 &flashFiringLevel, 1);
10989
10990 /* lens */
10991 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10992 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10993
10994 if (gCamCapability[mCameraId]->filter_densities_count) {
10995 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10996 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10997 gCamCapability[mCameraId]->filter_densities_count);
10998 }
10999
11000 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11001 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11002
Thierry Strudel3d639192016-09-09 11:52:26 -070011003 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11004 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11005
11006 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11007 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11008
11009 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11010 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11011
11012 /* face detection (default to OFF) */
11013 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11014 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11015
Thierry Strudel54dc9782017-02-15 12:12:10 -080011016 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11017 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011018
11019 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11020 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11021
11022 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11023 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11024
Thierry Strudel3d639192016-09-09 11:52:26 -070011025
11026 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11027 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11028
11029 /* Exposure time (update the min exposure time) */
11030 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11031 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11032
11033 /* frame duration */
11034 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11035 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11036
11037 /* sensitivity */
11038 static const int32_t default_sensitivity = 100;
11039 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011040#ifndef USE_HAL_3_3
11041 static const int32_t default_isp_sensitivity =
11042 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11043 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11044#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011045
11046 /*edge mode*/
11047 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11048
11049 /*noise reduction mode*/
11050 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11051
11052 /*color correction mode*/
11053 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11054 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11055
11056 /* tonemap mode */
11057 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11058
11059 int32_t scaler_crop_region[4];
11060 scaler_crop_region[0] = 0;
11061 scaler_crop_region[1] = 0;
11062 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11063 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11064 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11065
11066 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11067 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11068
11069 /*focus distance*/
11070 float focus_distance = 0.0;
11071 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11072
11073 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011074 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011075 float max_range = 0.0;
11076 float max_fixed_fps = 0.0;
11077 int32_t fps_range[2] = {0, 0};
11078 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11079 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011080 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11081 TEMPLATE_MAX_PREVIEW_FPS) {
11082 continue;
11083 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011084 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11085 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11086 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11087 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11088 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11089 if (range > max_range) {
11090 fps_range[0] =
11091 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11092 fps_range[1] =
11093 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11094 max_range = range;
11095 }
11096 } else {
11097 if (range < 0.01 && max_fixed_fps <
11098 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11099 fps_range[0] =
11100 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11101 fps_range[1] =
11102 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11103 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11104 }
11105 }
11106 }
11107 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11108
11109 /*precapture trigger*/
11110 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11111 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11112
11113 /*af trigger*/
11114 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11115 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11116
11117 /* ae & af regions */
11118 int32_t active_region[] = {
11119 gCamCapability[mCameraId]->active_array_size.left,
11120 gCamCapability[mCameraId]->active_array_size.top,
11121 gCamCapability[mCameraId]->active_array_size.left +
11122 gCamCapability[mCameraId]->active_array_size.width,
11123 gCamCapability[mCameraId]->active_array_size.top +
11124 gCamCapability[mCameraId]->active_array_size.height,
11125 0};
11126 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11127 sizeof(active_region) / sizeof(active_region[0]));
11128 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11129 sizeof(active_region) / sizeof(active_region[0]));
11130
11131 /* black level lock */
11132 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11133 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11134
Thierry Strudel3d639192016-09-09 11:52:26 -070011135 //special defaults for manual template
11136 if (type == CAMERA3_TEMPLATE_MANUAL) {
11137 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11138 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11139
11140 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11141 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11142
11143 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11144 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11145
11146 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11147 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11148
11149 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11150 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11151
11152 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11153 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11154 }
11155
11156
11157 /* TNR
11158 * We'll use this location to determine for which templates TNR will be set.
11159 * TNR is enabled if either the preview or the video stream requires it.
11160 * This is not to be confused with per-stream linking; that decision
11161 * is still made per session and is handled as part of stream configuration.
11162 */
11163 uint8_t tnr_enable = 0;
11164
11165 if (m_bTnrPreview || m_bTnrVideo) {
11166
11167 switch (type) {
11168 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11169 tnr_enable = 1;
11170 break;
11171
11172 default:
11173 tnr_enable = 0;
11174 break;
11175 }
11176
11177 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11178 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11179 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11180
11181 LOGD("TNR:%d with process plate %d for template:%d",
11182 tnr_enable, tnr_process_type, type);
11183 }
11184
11185 //Update Link tags to default
11186 int32_t sync_type = CAM_TYPE_STANDALONE;
11187 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11188
11189 int32_t is_main = 0; //this doesn't matter as app should overwrite
11190 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11191
11192 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11193
11194 /* CDS default */
11195 char prop[PROPERTY_VALUE_MAX];
11196 memset(prop, 0, sizeof(prop));
11197 property_get("persist.camera.CDS", prop, "Auto");
11198 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11199 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11200 if (CAM_CDS_MODE_MAX == cds_mode) {
11201 cds_mode = CAM_CDS_MODE_AUTO;
11202 }
11203
11204 /* Disabling CDS in templates which have TNR enabled*/
11205 if (tnr_enable)
11206 cds_mode = CAM_CDS_MODE_OFF;
11207
11208 int32_t mode = cds_mode;
11209 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011210
Thierry Strudel269c81a2016-10-12 12:13:59 -070011211 /* Manual Convergence AEC Speed is disabled by default*/
11212 float default_aec_speed = 0;
11213 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11214
11215 /* Manual Convergence AWB Speed is disabled by default*/
11216 float default_awb_speed = 0;
11217 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11218
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011219 // Set instant AEC to normal convergence by default
11220 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11221 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11222
Shuzhen Wang19463d72016-03-08 11:09:52 -080011223 /* hybrid ae */
11224 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11225
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011226 if (gExposeEnableZslKey) {
11227 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11228 }
11229
Thierry Strudel3d639192016-09-09 11:52:26 -070011230 mDefaultMetadata[type] = settings.release();
11231
11232 return mDefaultMetadata[type];
11233}
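/*
 * Illustrative sketch (assumption: caller-side usage, not HAL code): the
 * camera_metadata_t returned above is what the framework gets back from
 * construct_default_request_settings(); an individual tag can be read with the
 * camera_metadata helpers, e.g.:
 *
 *     camera_metadata_ro_entry_t entry;
 *     if (find_camera_metadata_ro_entry(defaultSettings,
 *             ANDROID_CONTROL_AF_MODE, &entry) == 0 && entry.count == 1) {
 *         uint8_t afMode = entry.data.u8[0];  // CONTINUOUS_PICTURE for the preview template
 *     }
 */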
11234
11235/*===========================================================================
11236 * FUNCTION : setFrameParameters
11237 *
11238 * DESCRIPTION: set parameters per frame as requested in the metadata from
11239 * framework
11240 *
11241 * PARAMETERS :
11242 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011243 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011244 * @blob_request: Whether this request is a blob request or not
11245 *
11246 * RETURN : success: NO_ERROR
11247 * failure:
11248 *==========================================================================*/
11249int QCamera3HardwareInterface::setFrameParameters(
11250 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011251 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011252 int blob_request,
11253 uint32_t snapshotStreamId)
11254{
11255 /*translate from camera_metadata_t type to parm_type_t*/
11256 int rc = 0;
11257 int32_t hal_version = CAM_HAL_V3;
11258
11259 clear_metadata_buffer(mParameters);
11260 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11261 LOGE("Failed to set hal version in the parameters");
11262 return BAD_VALUE;
11263 }
11264
11265 /*we need to update the frame number in the parameters*/
11266 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11267 request->frame_number)) {
11268 LOGE("Failed to set the frame number in the parameters");
11269 return BAD_VALUE;
11270 }
11271
11272 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011273 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011274 LOGE("Failed to set stream type mask in the parameters");
11275 return BAD_VALUE;
11276 }
11277
11278 if (mUpdateDebugLevel) {
11279 uint32_t dummyDebugLevel = 0;
11280 /* The value of dummyDebugLevel is irrelevant. On
11281 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
11282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11283 dummyDebugLevel)) {
11284 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11285 return BAD_VALUE;
11286 }
11287 mUpdateDebugLevel = false;
11288 }
11289
11290 if(request->settings != NULL){
11291 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11292 if (blob_request)
11293 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11294 }
11295
11296 return rc;
11297}
11298
11299/*===========================================================================
11300 * FUNCTION : setReprocParameters
11301 *
11302 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11303 * return it.
11304 *
11305 * PARAMETERS :
11306 * @request : request that needs to be serviced
11307 *
11308 * RETURN : success: NO_ERROR
11309 * failure:
11310 *==========================================================================*/
11311int32_t QCamera3HardwareInterface::setReprocParameters(
11312 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11313 uint32_t snapshotStreamId)
11314{
11315 /*translate from camera_metadata_t type to parm_type_t*/
11316 int rc = 0;
11317
11318 if (NULL == request->settings){
11319 LOGE("Reprocess settings cannot be NULL");
11320 return BAD_VALUE;
11321 }
11322
11323 if (NULL == reprocParam) {
11324 LOGE("Invalid reprocessing metadata buffer");
11325 return BAD_VALUE;
11326 }
11327 clear_metadata_buffer(reprocParam);
11328
11329 /*we need to update the frame number in the parameters*/
11330 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11331 request->frame_number)) {
11332 LOGE("Failed to set the frame number in the parameters");
11333 return BAD_VALUE;
11334 }
11335
11336 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11337 if (rc < 0) {
11338 LOGE("Failed to translate reproc request");
11339 return rc;
11340 }
11341
11342 CameraMetadata frame_settings;
11343 frame_settings = request->settings;
11344 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11345 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11346 int32_t *crop_count =
11347 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11348 int32_t *crop_data =
11349 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11350 int32_t *roi_map =
11351 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11352 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11353 cam_crop_data_t crop_meta;
11354 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11355 crop_meta.num_of_streams = 1;
11356 crop_meta.crop_info[0].crop.left = crop_data[0];
11357 crop_meta.crop_info[0].crop.top = crop_data[1];
11358 crop_meta.crop_info[0].crop.width = crop_data[2];
11359 crop_meta.crop_info[0].crop.height = crop_data[3];
11360
11361 crop_meta.crop_info[0].roi_map.left =
11362 roi_map[0];
11363 crop_meta.crop_info[0].roi_map.top =
11364 roi_map[1];
11365 crop_meta.crop_info[0].roi_map.width =
11366 roi_map[2];
11367 crop_meta.crop_info[0].roi_map.height =
11368 roi_map[3];
11369
11370 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11371 rc = BAD_VALUE;
11372 }
11373 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11374 request->input_buffer->stream,
11375 crop_meta.crop_info[0].crop.left,
11376 crop_meta.crop_info[0].crop.top,
11377 crop_meta.crop_info[0].crop.width,
11378 crop_meta.crop_info[0].crop.height);
11379 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11380 request->input_buffer->stream,
11381 crop_meta.crop_info[0].roi_map.left,
11382 crop_meta.crop_info[0].roi_map.top,
11383 crop_meta.crop_info[0].roi_map.width,
11384 crop_meta.crop_info[0].roi_map.height);
11385 } else {
11386 LOGE("Invalid reprocess crop count %d!", *crop_count);
11387 }
11388 } else {
11389 LOGE("No crop data from matching output stream");
11390 }
11391
11392 /* These settings are not needed for regular requests so handle them specially for
11393 reprocess requests; information needed for EXIF tags */
11394 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11395 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11396 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11397 if (NAME_NOT_FOUND != val) {
11398 uint32_t flashMode = (uint32_t)val;
11399 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11400 rc = BAD_VALUE;
11401 }
11402 } else {
11403 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11404 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11405 }
11406 } else {
11407 LOGH("No flash mode in reprocess settings");
11408 }
11409
11410 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11411 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11412 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11413 rc = BAD_VALUE;
11414 }
11415 } else {
11416 LOGH("No flash state in reprocess settings");
11417 }
11418
11419 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11420 uint8_t *reprocessFlags =
11421 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11422 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11423 *reprocessFlags)) {
11424 rc = BAD_VALUE;
11425 }
11426 }
11427
Thierry Strudel54dc9782017-02-15 12:12:10 -080011428 // Add exif debug data to internal metadata
11429 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11430 mm_jpeg_debug_exif_params_t *debug_params =
11431 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11432 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11433 // AE
11434 if (debug_params->ae_debug_params_valid == TRUE) {
11435 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11436 debug_params->ae_debug_params);
11437 }
11438 // AWB
11439 if (debug_params->awb_debug_params_valid == TRUE) {
11440 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11441 debug_params->awb_debug_params);
11442 }
11443 // AF
11444 if (debug_params->af_debug_params_valid == TRUE) {
11445 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11446 debug_params->af_debug_params);
11447 }
11448 // ASD
11449 if (debug_params->asd_debug_params_valid == TRUE) {
11450 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11451 debug_params->asd_debug_params);
11452 }
11453 // Stats
11454 if (debug_params->stats_debug_params_valid == TRUE) {
11455 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11456 debug_params->stats_debug_params);
11457 }
11458 // BE Stats
11459 if (debug_params->bestats_debug_params_valid == TRUE) {
11460 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11461 debug_params->bestats_debug_params);
11462 }
11463 // BHIST
11464 if (debug_params->bhist_debug_params_valid == TRUE) {
11465 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11466 debug_params->bhist_debug_params);
11467 }
11468 // 3A Tuning
11469 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11470 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11471 debug_params->q3a_tuning_debug_params);
11472 }
11473 }
11474
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011475 // Add metadata which reprocess needs
11476 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11477 cam_reprocess_info_t *repro_info =
11478 (cam_reprocess_info_t *)frame_settings.find
11479 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011480 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011481 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011482 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011483 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011484 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011485 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011486 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011487 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011488 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011489 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011490 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011491 repro_info->pipeline_flip);
11492 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11493 repro_info->af_roi);
11494 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11495 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011496 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11497 CAM_INTF_PARM_ROTATION metadata has already been added in
11498 translateToHalMetadata. HAL needs to keep this new rotation
11499 metadata. Otherwise, the old rotation info saved in the vendor tag
11500 is used */
11501 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11502 CAM_INTF_PARM_ROTATION, reprocParam) {
11503 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11504 } else {
11505 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011506 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011507 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011508 }
11509
11510 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11511 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11512 roi.width and roi.height are the final JPEG size.
11513 For now, HAL only checks this for reprocess requests */
11514 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11515 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11516 uint8_t *enable =
11517 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11518 if (*enable == TRUE) {
11519 int32_t *crop_data =
11520 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11521 cam_stream_crop_info_t crop_meta;
11522 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11523 crop_meta.stream_id = 0;
11524 crop_meta.crop.left = crop_data[0];
11525 crop_meta.crop.top = crop_data[1];
11526 crop_meta.crop.width = crop_data[2];
11527 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011528 // The JPEG crop roi should match cpp output size
11529 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11530 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11531 crop_meta.roi_map.left = 0;
11532 crop_meta.roi_map.top = 0;
11533 crop_meta.roi_map.width = cpp_crop->crop.width;
11534 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011535 }
11536 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11537 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011538 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011539 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011540 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11541 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011542 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011543 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11544
11545 // Add JPEG scale information
11546 cam_dimension_t scale_dim;
11547 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11548 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11549 int32_t *roi =
11550 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11551 scale_dim.width = roi[2];
11552 scale_dim.height = roi[3];
11553 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11554 scale_dim);
11555 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11556 scale_dim.width, scale_dim.height, mCameraId);
11557 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011558 }
11559 }
11560
11561 return rc;
11562}
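/*
 * Illustrative sketch (assumption: app-side vendor-tag usage, example values):
 * a reprocess request asking for the HW JPEG crop + scale handled above would
 * carry settings along these lines:
 *
 *     uint8_t enable = TRUE;
 *     int32_t cropRect[4] = {0, 0, 2000, 1500};   // left, top, width, height
 *     int32_t cropRoi[4]  = {0, 0, 1920, 1080};   // roi[2], roi[3] = final JPEG size
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
 *     settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
 */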
11563
11564/*===========================================================================
11565 * FUNCTION : saveRequestSettings
11566 *
11567 * DESCRIPTION: Add any settings that might have changed to the request settings
11568 * and save the settings to be applied on the frame
11569 *
11570 * PARAMETERS :
11571 * @jpegMetadata : the extracted and/or modified jpeg metadata
11572 * @request : request with initial settings
11573 *
11574 * RETURN :
11575 * camera_metadata_t* : pointer to the saved request settings
11576 *==========================================================================*/
11577camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11578 const CameraMetadata &jpegMetadata,
11579 camera3_capture_request_t *request)
11580{
11581 camera_metadata_t *resultMetadata;
11582 CameraMetadata camMetadata;
11583 camMetadata = request->settings;
11584
11585 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11586 int32_t thumbnail_size[2];
11587 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11588 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11589 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11590 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11591 }
11592
11593 if (request->input_buffer != NULL) {
11594 uint8_t reprocessFlags = 1;
11595 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11596 (uint8_t*)&reprocessFlags,
11597 sizeof(reprocessFlags));
11598 }
11599
11600 resultMetadata = camMetadata.release();
11601 return resultMetadata;
11602}
11603
11604/*===========================================================================
11605 * FUNCTION : setHalFpsRange
11606 *
11607 * DESCRIPTION: set FPS range parameter
11608 *
11609 *
11610 * PARAMETERS :
11611 * @settings : Metadata from framework
11612 * @hal_metadata: Metadata buffer
11613 *
11614 *
11615 * RETURN : success: NO_ERROR
11616 * failure:
11617 *==========================================================================*/
11618int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11619 metadata_buffer_t *hal_metadata)
11620{
11621 int32_t rc = NO_ERROR;
11622 cam_fps_range_t fps_range;
11623 fps_range.min_fps = (float)
11624 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11625 fps_range.max_fps = (float)
11626 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11627 fps_range.video_min_fps = fps_range.min_fps;
11628 fps_range.video_max_fps = fps_range.max_fps;
11629
11630 LOGD("aeTargetFpsRange fps: [%f %f]",
11631 fps_range.min_fps, fps_range.max_fps);
11632 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11633 * follows:
11634 * ---------------------------------------------------------------|
11635 * Video stream is absent in configure_streams |
11636 * (Camcorder preview before the first video record |
11637 * ---------------------------------------------------------------|
11638 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11639 * | | | vid_min/max_fps|
11640 * ---------------------------------------------------------------|
11641 * NO | [ 30, 240] | 240 | [240, 240] |
11642 * |-------------|-------------|----------------|
11643 * | [240, 240] | 240 | [240, 240] |
11644 * ---------------------------------------------------------------|
11645 * Video stream is present in configure_streams |
11646 * ---------------------------------------------------------------|
11647 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11648 * | | | vid_min/max_fps|
11649 * ---------------------------------------------------------------|
11650 * NO | [ 30, 240] | 240 | [240, 240] |
11651 * (camcorder prev |-------------|-------------|----------------|
11652 * after video rec | [240, 240] | 240 | [240, 240] |
11653 * is stopped) | | | |
11654 * ---------------------------------------------------------------|
11655 * YES | [ 30, 240] | 240 | [240, 240] |
11656 * |-------------|-------------|----------------|
11657 * | [240, 240] | 240 | [240, 240] |
11658 * ---------------------------------------------------------------|
11659 * When Video stream is absent in configure_streams,
11660 * preview fps = sensor_fps / batchsize
11661 * Eg: for 240fps at batchSize 4, preview = 60fps
11662 * for 120fps at batchSize 4, preview = 30fps
11663 *
11664 * When video stream is present in configure_streams, preview fps is as per
11665 * the ratio of preview buffers to video buffers requested in process
11666 * capture request
11667 */
11668 mBatchSize = 0;
11669 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11670 fps_range.min_fps = fps_range.video_max_fps;
11671 fps_range.video_min_fps = fps_range.video_max_fps;
11672 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11673 fps_range.max_fps);
11674 if (NAME_NOT_FOUND != val) {
11675 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11676 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11677 return BAD_VALUE;
11678 }
11679
11680 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11681 /* If batchmode is currently in progress and the fps changes,
11682 * set the flag to restart the sensor */
11683 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11684 (mHFRVideoFps != fps_range.max_fps)) {
11685 mNeedSensorRestart = true;
11686 }
11687 mHFRVideoFps = fps_range.max_fps;
11688 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11689 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11690 mBatchSize = MAX_HFR_BATCH_SIZE;
11691 }
11692 }
11693 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11694
11695 }
11696 } else {
11697 /* HFR mode is session param in backend/ISP. This should be reset when
11698 * in non-HFR mode */
11699 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11700 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11701 return BAD_VALUE;
11702 }
11703 }
11704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11705 return BAD_VALUE;
11706 }
11707 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11708 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11709 return rc;
11710}
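/*
 * Worked example for the constrained high-speed path above (numbers are
 * illustrative; PREVIEW_FPS_FOR_HFR assumed to be 30): with
 * aeTargetFpsRange = [30, 240],
 *
 *     sensor fps range -> [240, 240]  (min_fps forced to video_max_fps)
 *     mHFRVideoFps     -> 240
 *     mBatchSize       -> 240 / 30 = 8, capped at MAX_HFR_BATCH_SIZE
 */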
11711
11712/*===========================================================================
11713 * FUNCTION : translateToHalMetadata
11714 *
11715 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11716 *
11717 *
11718 * PARAMETERS :
11719 * @request : request sent from framework
11720 *
11721 *
11722 * RETURN : success: NO_ERROR
11723 * failure:
11724 *==========================================================================*/
11725int QCamera3HardwareInterface::translateToHalMetadata
11726 (const camera3_capture_request_t *request,
11727 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011728 uint32_t snapshotStreamId) {
11729 if (request == nullptr || hal_metadata == nullptr) {
11730 return BAD_VALUE;
11731 }
11732
11733 int64_t minFrameDuration = getMinFrameDuration(request);
11734
11735 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11736 minFrameDuration);
11737}
11738
11739int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11740 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11741 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11742
Thierry Strudel3d639192016-09-09 11:52:26 -070011743 int rc = 0;
11744 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011745 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011746
11747 /* Do not change the order of the following list unless you know what you are
11748 * doing.
11749 * The order is laid out in such a way that parameters in the front of the table
11750 * may be used to override the parameters later in the table. Examples are:
11751 * 1. META_MODE should precede AEC/AWB/AF MODE
11752 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11753 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11754 * 4. Any mode should precede its corresponding settings
11755 */
11756 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11757 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11759 rc = BAD_VALUE;
11760 }
11761 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11762 if (rc != NO_ERROR) {
11763 LOGE("extractSceneMode failed");
11764 }
11765 }
11766
11767 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11768 uint8_t fwk_aeMode =
11769 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11770 uint8_t aeMode;
11771 int32_t redeye;
11772
11773 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11774 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011775 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11776 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011777 } else {
11778 aeMode = CAM_AE_MODE_ON;
11779 }
11780 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11781 redeye = 1;
11782 } else {
11783 redeye = 0;
11784 }
11785
11786 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11787 fwk_aeMode);
11788 if (NAME_NOT_FOUND != val) {
11789 int32_t flashMode = (int32_t)val;
11790 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11791 }
11792
11793 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11794 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11795 rc = BAD_VALUE;
11796 }
11797 }
11798
11799 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11800 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11801 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11802 fwk_whiteLevel);
11803 if (NAME_NOT_FOUND != val) {
11804 uint8_t whiteLevel = (uint8_t)val;
11805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11806 rc = BAD_VALUE;
11807 }
11808 }
11809 }
11810
11811 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11812 uint8_t fwk_cacMode =
11813 frame_settings.find(
11814 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11815 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11816 fwk_cacMode);
11817 if (NAME_NOT_FOUND != val) {
11818 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11819 bool entryAvailable = FALSE;
11820 // Check whether Frameworks set CAC mode is supported in device or not
11821 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11822 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11823 entryAvailable = TRUE;
11824 break;
11825 }
11826 }
11827 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11828 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.,
11829 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
11830 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
11831 if (entryAvailable == FALSE) {
11832 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11833 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11834 } else {
11835 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11836 // High is not supported, so set FAST; the spec says the underlying
11837 // device implementation can be the same for both modes.
11838 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11839 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11840 // Fast is not supported, so neither HIGH nor FAST can be set; choose OFF
11841 // to avoid the fps drop that high quality would cause
11842 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11843 } else {
11844 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11845 }
11846 }
11847 }
11848 LOGD("Final cacMode is %d", cacMode);
11849 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11850 rc = BAD_VALUE;
11851 }
11852 } else {
11853 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11854 }
11855 }
11856
Thierry Strudel2896d122017-02-23 19:18:03 -080011857 char af_value[PROPERTY_VALUE_MAX];
11858 property_get("persist.camera.af.infinity", af_value, "0");
11859
Jason Lee84ae9972017-02-24 13:24:24 -080011860 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011861 if (atoi(af_value) == 0) {
11862 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011863 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011864 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11865 fwk_focusMode);
11866 if (NAME_NOT_FOUND != val) {
11867 uint8_t focusMode = (uint8_t)val;
11868 LOGD("set focus mode %d", focusMode);
11869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11870 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11871 rc = BAD_VALUE;
11872 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011873 }
11874 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011875 } else {
11876 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11877 LOGE("Focus forced to infinity %d", focusMode);
11878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11879 rc = BAD_VALUE;
11880 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011881 }
11882
Jason Lee84ae9972017-02-24 13:24:24 -080011883 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11884 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011885 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11887 focalDistance)) {
11888 rc = BAD_VALUE;
11889 }
11890 }
11891
11892 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11893 uint8_t fwk_antibandingMode =
11894 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11895 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11896 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11897 if (NAME_NOT_FOUND != val) {
11898 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011899 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11900 if (m60HzZone) {
11901 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11902 } else {
11903 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11904 }
11905 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011906 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11907 hal_antibandingMode)) {
11908 rc = BAD_VALUE;
11909 }
11910 }
11911 }
11912
11913 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11914 int32_t expCompensation = frame_settings.find(
11915 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11916 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11917 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11918 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11919 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011920 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11922 expCompensation)) {
11923 rc = BAD_VALUE;
11924 }
11925 }
11926
11927 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11928 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11929 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11930 rc = BAD_VALUE;
11931 }
11932 }
11933 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11934 rc = setHalFpsRange(frame_settings, hal_metadata);
11935 if (rc != NO_ERROR) {
11936 LOGE("setHalFpsRange failed");
11937 }
11938 }
11939
11940 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11941 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11942 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11943 rc = BAD_VALUE;
11944 }
11945 }
11946
11947 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11948 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11949 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11950 fwk_effectMode);
11951 if (NAME_NOT_FOUND != val) {
11952 uint8_t effectMode = (uint8_t)val;
11953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11954 rc = BAD_VALUE;
11955 }
11956 }
11957 }
11958
11959 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11960 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11962 colorCorrectMode)) {
11963 rc = BAD_VALUE;
11964 }
11965 }
11966
11967 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11968 cam_color_correct_gains_t colorCorrectGains;
11969 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11970 colorCorrectGains.gains[i] =
11971 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11972 }
11973 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11974 colorCorrectGains)) {
11975 rc = BAD_VALUE;
11976 }
11977 }
11978
11979 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11980 cam_color_correct_matrix_t colorCorrectTransform;
11981 cam_rational_type_t transform_elem;
11982 size_t num = 0;
11983 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11984 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11985 transform_elem.numerator =
11986 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11987 transform_elem.denominator =
11988 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11989 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11990 num++;
11991 }
11992 }
11993 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11994 colorCorrectTransform)) {
11995 rc = BAD_VALUE;
11996 }
11997 }
11998
11999 cam_trigger_t aecTrigger;
12000 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12001 aecTrigger.trigger_id = -1;
12002 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12003 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12004 aecTrigger.trigger =
12005 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12006 aecTrigger.trigger_id =
12007 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12008 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12009 aecTrigger)) {
12010 rc = BAD_VALUE;
12011 }
12012 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12013 aecTrigger.trigger, aecTrigger.trigger_id);
12014 }
12015
12016 /*af_trigger must come with a trigger id*/
12017 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12018 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12019 cam_trigger_t af_trigger;
12020 af_trigger.trigger =
12021 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12022 af_trigger.trigger_id =
12023 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12025 rc = BAD_VALUE;
12026 }
12027 LOGD("AfTrigger: %d AfTriggerID: %d",
12028 af_trigger.trigger, af_trigger.trigger_id);
12029 }
12030
12031 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12032 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12033 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12034 rc = BAD_VALUE;
12035 }
12036 }
12037 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12038 cam_edge_application_t edge_application;
12039 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012040
Thierry Strudel3d639192016-09-09 11:52:26 -070012041 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12042 edge_application.sharpness = 0;
12043 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012044 edge_application.sharpness =
12045 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12046 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12047 int32_t sharpness =
12048 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12049 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12050 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12051 LOGD("Setting edge mode sharpness %d", sharpness);
12052 edge_application.sharpness = sharpness;
12053 }
12054 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012055 }
12056 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12057 rc = BAD_VALUE;
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12062 int32_t respectFlashMode = 1;
12063 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12064 uint8_t fwk_aeMode =
12065 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012066 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12067 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12068 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012069 respectFlashMode = 0;
12070 LOGH("AE Mode controls flash, ignore android.flash.mode");
12071 }
12072 }
12073 if (respectFlashMode) {
12074 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12075 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12076 LOGH("flash mode after mapping %d", val);
12077 // To check: CAM_INTF_META_FLASH_MODE usage
12078 if (NAME_NOT_FOUND != val) {
12079 uint8_t flashMode = (uint8_t)val;
12080 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12081 rc = BAD_VALUE;
12082 }
12083 }
12084 }
12085 }
12086
12087 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12088 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12090 rc = BAD_VALUE;
12091 }
12092 }
12093
12094 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12095 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12096 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12097 flashFiringTime)) {
12098 rc = BAD_VALUE;
12099 }
12100 }
12101
12102 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12103 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12104 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12105 hotPixelMode)) {
12106 rc = BAD_VALUE;
12107 }
12108 }
12109
12110 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12111 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12112 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12113 lensAperture)) {
12114 rc = BAD_VALUE;
12115 }
12116 }
12117
12118 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12119 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12120 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12121 filterDensity)) {
12122 rc = BAD_VALUE;
12123 }
12124 }
12125
12126 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12127 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12128 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12129 focalLength)) {
12130 rc = BAD_VALUE;
12131 }
12132 }
12133
12134 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12135 uint8_t optStabMode =
12136 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12138 optStabMode)) {
12139 rc = BAD_VALUE;
12140 }
12141 }
12142
12143 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12144 uint8_t videoStabMode =
12145 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12146 LOGD("videoStabMode from APP = %d", videoStabMode);
12147 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12148 videoStabMode)) {
12149 rc = BAD_VALUE;
12150 }
12151 }
12152
12153
12154 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12155 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12156 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12157 noiseRedMode)) {
12158 rc = BAD_VALUE;
12159 }
12160 }
12161
12162 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12163 float reprocessEffectiveExposureFactor =
12164 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12166 reprocessEffectiveExposureFactor)) {
12167 rc = BAD_VALUE;
12168 }
12169 }
12170
12171 cam_crop_region_t scalerCropRegion;
12172 bool scalerCropSet = false;
12173 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12174 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12175 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12176 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12177 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12178
12179 // Map coordinate system from active array to sensor output.
12180 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12181 scalerCropRegion.width, scalerCropRegion.height);
12182
12183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12184 scalerCropRegion)) {
12185 rc = BAD_VALUE;
12186 }
12187 scalerCropSet = true;
12188 }
12189
12190 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12191 int64_t sensorExpTime =
12192 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12193 LOGD("setting sensorExpTime %lld", sensorExpTime);
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12195 sensorExpTime)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
12200 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12201 int64_t sensorFrameDuration =
12202 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012203 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12204 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12205 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12206 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12207 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12208 sensorFrameDuration)) {
12209 rc = BAD_VALUE;
12210 }
12211 }
12212
12213 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12214 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12215 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12216 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12217 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12218 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12219 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12220 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12221 sensorSensitivity)) {
12222 rc = BAD_VALUE;
12223 }
12224 }
12225
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012226#ifndef USE_HAL_3_3
12227 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12228 int32_t ispSensitivity =
12229 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12230 if (ispSensitivity <
12231 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12232 ispSensitivity =
12233 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12234 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12235 }
12236 if (ispSensitivity >
12237 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12238 ispSensitivity =
12239 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12240 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12241 }
12242 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12243 ispSensitivity)) {
12244 rc = BAD_VALUE;
12245 }
12246 }
12247#endif
12248
Thierry Strudel3d639192016-09-09 11:52:26 -070012249 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12250 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12251 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12252 rc = BAD_VALUE;
12253 }
12254 }
12255
12256 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12257 uint8_t fwk_facedetectMode =
12258 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12259
12260 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12261 fwk_facedetectMode);
12262
12263 if (NAME_NOT_FOUND != val) {
12264 uint8_t facedetectMode = (uint8_t)val;
12265 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12266 facedetectMode)) {
12267 rc = BAD_VALUE;
12268 }
12269 }
12270 }
12271
Thierry Strudel54dc9782017-02-15 12:12:10 -080012272 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012273 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012274 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12276 histogramMode)) {
12277 rc = BAD_VALUE;
12278 }
12279 }
12280
12281 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12282 uint8_t sharpnessMapMode =
12283 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12284 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12285 sharpnessMapMode)) {
12286 rc = BAD_VALUE;
12287 }
12288 }
12289
12290 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12291 uint8_t tonemapMode =
12292 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12298 /*All tonemap channels will have the same number of points*/
12299 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12300 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12301 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12302 cam_rgb_tonemap_curves tonemapCurves;
12303 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12304 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12305 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12306 tonemapCurves.tonemap_points_cnt,
12307 CAM_MAX_TONEMAP_CURVE_SIZE);
12308 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12309 }
12310
12311 /* ch0 = G*/
12312 size_t point = 0;
12313 cam_tonemap_curve_t tonemapCurveGreen;
12314 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12315 for (size_t j = 0; j < 2; j++) {
12316 tonemapCurveGreen.tonemap_points[i][j] =
12317 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12318 point++;
12319 }
12320 }
12321 tonemapCurves.curves[0] = tonemapCurveGreen;
12322
12323 /* ch 1 = B */
12324 point = 0;
12325 cam_tonemap_curve_t tonemapCurveBlue;
12326 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12327 for (size_t j = 0; j < 2; j++) {
12328 tonemapCurveBlue.tonemap_points[i][j] =
12329 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12330 point++;
12331 }
12332 }
12333 tonemapCurves.curves[1] = tonemapCurveBlue;
12334
12335 /* ch 2 = R */
12336 point = 0;
12337 cam_tonemap_curve_t tonemapCurveRed;
12338 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12339 for (size_t j = 0; j < 2; j++) {
12340 tonemapCurveRed.tonemap_points[i][j] =
12341 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12342 point++;
12343 }
12344 }
12345 tonemapCurves.curves[2] = tonemapCurveRed;
12346
12347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12348 tonemapCurves)) {
12349 rc = BAD_VALUE;
12350 }
12351 }
12352
12353 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12354 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12356 captureIntent)) {
12357 rc = BAD_VALUE;
12358 }
12359 }
12360
12361 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12362 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12364 blackLevelLock)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12370 uint8_t lensShadingMapMode =
12371 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12373 lensShadingMapMode)) {
12374 rc = BAD_VALUE;
12375 }
12376 }
12377
12378 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12379 cam_area_t roi;
12380 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012381 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012382
12383 // Map coordinate system from active array to sensor output.
12384 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12385 roi.rect.height);
12386
12387 if (scalerCropSet) {
12388 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12389 }
12390 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12391 rc = BAD_VALUE;
12392 }
12393 }
12394
12395 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12396 cam_area_t roi;
12397 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012398 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012399
12400 // Map coordinate system from active array to sensor output.
12401 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12402 roi.rect.height);
12403
12404 if (scalerCropSet) {
12405 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12406 }
12407 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12408 rc = BAD_VALUE;
12409 }
12410 }
12411
12412 // CDS for non-HFR non-video mode
12413 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12414 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12415 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12416 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12417 LOGE("Invalid CDS mode %d!", *fwk_cds);
12418 } else {
12419 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12420 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12421 rc = BAD_VALUE;
12422 }
12423 }
12424 }
12425
Thierry Strudel04e026f2016-10-10 11:27:36 -070012426 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012427 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012428 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012429 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12430 }
12431 if (m_bVideoHdrEnabled)
12432 vhdr = CAM_VIDEO_HDR_MODE_ON;
12433
Thierry Strudel54dc9782017-02-15 12:12:10 -080012434 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12435
12436 if(vhdr != curr_hdr_state)
12437 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12438
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012439 rc = setVideoHdrMode(mParameters, vhdr);
12440 if (rc != NO_ERROR) {
12441 LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012442 }
12443
12444 //IR
12445 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12446 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12447 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012448 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12449 uint8_t isIRon = 0;
12450
12451 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012452 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12453 LOGE("Invalid IR mode %d!", fwk_ir);
12454 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012455 if(isIRon != curr_ir_state )
12456 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12457
Thierry Strudel04e026f2016-10-10 11:27:36 -070012458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12459 CAM_INTF_META_IR_MODE, fwk_ir)) {
12460 rc = BAD_VALUE;
12461 }
12462 }
12463 }
12464
Thierry Strudel54dc9782017-02-15 12:12:10 -080012465 //Binning Correction Mode
12466 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12467 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12468 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12469 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12470 || (0 > fwk_binning_correction)) {
12471 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12472 } else {
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12474 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12475 rc = BAD_VALUE;
12476 }
12477 }
12478 }
12479
Thierry Strudel269c81a2016-10-12 12:13:59 -070012480 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12481 float aec_speed;
12482 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12483 LOGD("AEC Speed :%f", aec_speed);
12484 if ( aec_speed < 0 ) {
12485 LOGE("Invalid AEC convergence speed %f!", aec_speed);
12486 } else {
12487 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12488 aec_speed)) {
12489 rc = BAD_VALUE;
12490 }
12491 }
12492 }
12493
12494 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12495 float awb_speed;
12496 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12497 LOGD("AWB Speed :%f", awb_speed);
12498 if ( awb_speed < 0 ) {
12499 LOGE("Invalid AWB convergence speed %f!", awb_speed);
12500 } else {
12501 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12502 awb_speed)) {
12503 rc = BAD_VALUE;
12504 }
12505 }
12506 }
12507
Thierry Strudel3d639192016-09-09 11:52:26 -070012508 // TNR
12509 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12510 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12511 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012512 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012513 cam_denoise_param_t tnr;
12514 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12515 tnr.process_plates =
12516 (cam_denoise_process_type_t)frame_settings.find(
12517 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12518 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012519
12520 if(b_TnrRequested != curr_tnr_state)
12521 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12522
Thierry Strudel3d639192016-09-09 11:52:26 -070012523 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12524 rc = BAD_VALUE;
12525 }
12526 }
12527
Thierry Strudel54dc9782017-02-15 12:12:10 -080012528 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012529 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012530 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012531 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12532 *exposure_metering_mode)) {
12533 rc = BAD_VALUE;
12534 }
12535 }
12536
Thierry Strudel3d639192016-09-09 11:52:26 -070012537 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12538 int32_t fwk_testPatternMode =
12539 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12540 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12541 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12542
12543 if (NAME_NOT_FOUND != testPatternMode) {
12544 cam_test_pattern_data_t testPatternData;
12545 memset(&testPatternData, 0, sizeof(testPatternData));
12546 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12547 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12548 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12549 int32_t *fwk_testPatternData =
12550 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12551 testPatternData.r = fwk_testPatternData[0];
12552 testPatternData.b = fwk_testPatternData[3];
12553 switch (gCamCapability[mCameraId]->color_arrangement) {
12554 case CAM_FILTER_ARRANGEMENT_RGGB:
12555 case CAM_FILTER_ARRANGEMENT_GRBG:
12556 testPatternData.gr = fwk_testPatternData[1];
12557 testPatternData.gb = fwk_testPatternData[2];
12558 break;
12559 case CAM_FILTER_ARRANGEMENT_GBRG:
12560 case CAM_FILTER_ARRANGEMENT_BGGR:
12561 testPatternData.gr = fwk_testPatternData[2];
12562 testPatternData.gb = fwk_testPatternData[1];
12563 break;
12564 default:
12565 LOGE("color arrangement %d is not supported",
12566 gCamCapability[mCameraId]->color_arrangement);
12567 break;
12568 }
12569 }
12570 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12571 testPatternData)) {
12572 rc = BAD_VALUE;
12573 }
12574 } else {
12575 LOGE("Invalid framework sensor test pattern mode %d",
12576 fwk_testPatternMode);
12577 }
12578 }
12579
12580 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12581 size_t count = 0;
12582 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12583 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12584 gps_coords.data.d, gps_coords.count, count);
12585 if (gps_coords.count != count) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12591 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12592 size_t count = 0;
12593 const char *gps_methods_src = (const char *)
12594 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12595 memset(gps_methods, '\0', sizeof(gps_methods));
12596 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12597 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12598 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12599 if (GPS_PROCESSING_METHOD_SIZE != count) {
12600 rc = BAD_VALUE;
12601 }
12602 }
12603
12604 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12605 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12606 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12607 gps_timestamp)) {
12608 rc = BAD_VALUE;
12609 }
12610 }
12611
12612 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12613 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12614 cam_rotation_info_t rotation_info;
12615 if (orientation == 0) {
12616 rotation_info.rotation = ROTATE_0;
12617 } else if (orientation == 90) {
12618 rotation_info.rotation = ROTATE_90;
12619 } else if (orientation == 180) {
12620 rotation_info.rotation = ROTATE_180;
12621 } else if (orientation == 270) {
12622 rotation_info.rotation = ROTATE_270;
12623 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012624 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012625 rotation_info.streamId = snapshotStreamId;
12626 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12628 rc = BAD_VALUE;
12629 }
12630 }
12631
12632 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12633 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12634 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12635 rc = BAD_VALUE;
12636 }
12637 }
12638
12639 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12640 uint32_t thumb_quality = (uint32_t)
12641 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12642 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12643 thumb_quality)) {
12644 rc = BAD_VALUE;
12645 }
12646 }
12647
12648 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12649 cam_dimension_t dim;
12650 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12651 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12653 rc = BAD_VALUE;
12654 }
12655 }
12656
12657 // Internal metadata
12658 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12659 size_t count = 0;
12660 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12661 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12662 privatedata.data.i32, privatedata.count, count);
12663 if (privatedata.count != count) {
12664 rc = BAD_VALUE;
12665 }
12666 }
12667
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012668 // ISO/Exposure Priority
12669 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12670 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12671 cam_priority_mode_t mode =
12672 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12673 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12674 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12675 use_iso_exp_pty.previewOnly = FALSE;
12676 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12677 use_iso_exp_pty.value = *ptr;
12678
12679 if(CAM_ISO_PRIORITY == mode) {
12680 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12681 use_iso_exp_pty)) {
12682 rc = BAD_VALUE;
12683 }
12684 }
12685 else {
12686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12687 use_iso_exp_pty)) {
12688 rc = BAD_VALUE;
12689 }
12690 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012691
12692 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12693 rc = BAD_VALUE;
12694 }
12695 }
12696 } else {
12697 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12698 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012699 }
12700 }
12701
12702 // Saturation
12703 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12704 int32_t* use_saturation =
12705 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12707 rc = BAD_VALUE;
12708 }
12709 }
12710
Thierry Strudel3d639192016-09-09 11:52:26 -070012711 // EV step
12712 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12713 gCamCapability[mCameraId]->exp_compensation_step)) {
12714 rc = BAD_VALUE;
12715 }
12716
12717 // CDS info
12718 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12719 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12720 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12721
12722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12723 CAM_INTF_META_CDS_DATA, *cdsData)) {
12724 rc = BAD_VALUE;
12725 }
12726 }
12727
Shuzhen Wang19463d72016-03-08 11:09:52 -080012728 // Hybrid AE
12729 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12730 uint8_t *hybrid_ae = (uint8_t *)
12731 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12732
12733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12734 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12735 rc = BAD_VALUE;
12736 }
12737 }
12738
Shuzhen Wang14415f52016-11-16 18:26:18 -080012739 // Histogram
12740 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12741 uint8_t histogramMode =
12742 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12743 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12744 histogramMode)) {
12745 rc = BAD_VALUE;
12746 }
12747 }
12748
12749 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12750 int32_t histogramBins =
12751 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12752 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12753 histogramBins)) {
12754 rc = BAD_VALUE;
12755 }
12756 }
12757
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012758 // Tracking AF
12759 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12760 uint8_t trackingAfTrigger =
12761 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12762 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12763 trackingAfTrigger)) {
12764 rc = BAD_VALUE;
12765 }
12766 }
12767
Thierry Strudel3d639192016-09-09 11:52:26 -070012768 return rc;
12769}
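/* Illustrative sketch (not part of the original HAL source): the
 * framework-to-HAL enum translation pattern used throughout
 * translateFwkMetadataToHalMetadata() above. lookupHalName() walks a small
 * {framework value, HAL value} table and returns NAME_NOT_FOUND when no
 * mapping exists, so the HAL parameter is only programmed for values the
 * backend understands. The table, type names, and values below are made-up
 * examples, not the real QCameraMap tables. */
namespace {

struct ExampleQCameraMap {
    int fwk_name;   // framework enum value
    int hal_name;   // corresponding HAL enum value
};

const ExampleQCameraMap EXAMPLE_MODE_MAP[] = {
    { 0 /* fwk OFF  */, 100 /* HAL OFF  */ },
    { 1 /* fwk FAST */, 101 /* HAL FAST */ },
};

int exampleLookupHalName(const ExampleQCameraMap *map, size_t len, int fwk_name)
{
    for (size_t i = 0; i < len; i++) {
        if (map[i].fwk_name == fwk_name) {
            return map[i].hal_name;
        }
    }
    return -1;   // analogous to NAME_NOT_FOUND
}

} // namespace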
12770
12771/*===========================================================================
12772 * FUNCTION : captureResultCb
12773 *
12774 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12775 *
12776 * PARAMETERS :
12777 * @frame : frame information from mm-camera-interface
12778 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12779 * @userdata: userdata
12780 *
12781 * RETURN : NONE
12782 *==========================================================================*/
12783void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12784 camera3_stream_buffer_t *buffer,
12785 uint32_t frame_number, bool isInputBuffer, void *userdata)
12786{
12787 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12788 if (hw == NULL) {
12789 LOGE("Invalid hw %p", hw);
12790 return;
12791 }
12792
12793 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12794 return;
12795}
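/* Illustrative sketch (not part of the original HAL source): the static
 * callback trampoline pattern used by captureResultCb() above and by
 * setBufferErrorStatus() below - the C-style callback recovers the object
 * from the opaque userdata pointer and forwards to the member function.
 * ExampleListener is a made-up type. */
namespace {

class ExampleListener {
public:
    void onEvent(int value) { mLastValue = value; }

    // Signature compatible with a C callback that carries a void *userdata cookie
    static void onEventTrampoline(int value, void *userdata) {
        ExampleListener *self = static_cast<ExampleListener *>(userdata);
        if (self == nullptr) {
            return;   // mirrors the NULL hw check above
        }
        self->onEvent(value);
    }

private:
    int mLastValue = 0;
};

} // namespace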
12796
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012797/*===========================================================================
12798 * FUNCTION : setBufferErrorStatus
12799 *
12800 * DESCRIPTION: Callback handler for channels to report any buffer errors
12801 *
12802 * PARAMETERS :
12803 * @ch : Channel on which buffer error is reported from
12804 * @frame_number : frame number on which buffer error is reported on
12805 * @buffer_status : buffer error status
12806 * @userdata: userdata
12807 *
12808 * RETURN : NONE
12809 *==========================================================================*/
12810void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12811 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12812{
12813 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12814 if (hw == NULL) {
12815 LOGE("Invalid hw %p", hw);
12816 return;
12817 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012818
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012819 hw->setBufferErrorStatus(ch, frame_number, err);
12820 return;
12821}
12822
12823void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12824 uint32_t frameNumber, camera3_buffer_status_t err)
12825{
12826 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12827 pthread_mutex_lock(&mMutex);
12828
12829 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12830 if (req.frame_number != frameNumber)
12831 continue;
12832 for (auto& k : req.mPendingBufferList) {
12833 if(k.stream->priv == ch) {
12834 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12835 }
12836 }
12837 }
12838
12839 pthread_mutex_unlock(&mMutex);
12840 return;
12841}
Thierry Strudel3d639192016-09-09 11:52:26 -070012842/*===========================================================================
12843 * FUNCTION : initialize
12844 *
12845 * DESCRIPTION: Pass framework callback pointers to HAL
12846 *
12847 * PARAMETERS :
12848 *
12849 *
12850 * RETURN : Success : 0
12851 * Failure: -ENODEV
12852 *==========================================================================*/
12853
12854int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12855 const camera3_callback_ops_t *callback_ops)
12856{
12857 LOGD("E");
12858 QCamera3HardwareInterface *hw =
12859 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12860 if (!hw) {
12861 LOGE("NULL camera device");
12862 return -ENODEV;
12863 }
12864
12865 int rc = hw->initialize(callback_ops);
12866 LOGD("X");
12867 return rc;
12868}
12869
12870/*===========================================================================
12871 * FUNCTION : configure_streams
12872 *
12873 * DESCRIPTION: Configure the set of streams requested by the framework for this camera device
12874 *
12875 * PARAMETERS :
12876 *
12877 *
12878 * RETURN : Success: 0
12879 * Failure: -EINVAL (if stream configuration is invalid)
12880 * -ENODEV (fatal error)
12881 *==========================================================================*/
12882
12883int QCamera3HardwareInterface::configure_streams(
12884 const struct camera3_device *device,
12885 camera3_stream_configuration_t *stream_list)
12886{
12887 LOGD("E");
12888 QCamera3HardwareInterface *hw =
12889 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12890 if (!hw) {
12891 LOGE("NULL camera device");
12892 return -ENODEV;
12893 }
12894 int rc = hw->configureStreams(stream_list);
12895 LOGD("X");
12896 return rc;
12897}
12898
12899/*===========================================================================
12900 * FUNCTION : construct_default_request_settings
12901 *
12902 * DESCRIPTION: Configure a settings buffer to meet the required use case
12903 *
12904 * PARAMETERS :
12905 *
12906 *
12907 * RETURN : Success: Return valid metadata
12908 * Failure: Return NULL
12909 *==========================================================================*/
12910const camera_metadata_t* QCamera3HardwareInterface::
12911 construct_default_request_settings(const struct camera3_device *device,
12912 int type)
12913{
12914
12915 LOGD("E");
12916 camera_metadata_t* fwk_metadata = NULL;
12917 QCamera3HardwareInterface *hw =
12918 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12919 if (!hw) {
12920 LOGE("NULL camera device");
12921 return NULL;
12922 }
12923
12924 fwk_metadata = hw->translateCapabilityToMetadata(type);
12925
12926 LOGD("X");
12927 return fwk_metadata;
12928}
12929
12930/*===========================================================================
12931 * FUNCTION : process_capture_request
12932 *
12933 * DESCRIPTION: Submit a new capture request from the framework to the HAL
12934 *
12935 * PARAMETERS :
12936 *
12937 *
12938 * RETURN :
12939 *==========================================================================*/
12940int QCamera3HardwareInterface::process_capture_request(
12941 const struct camera3_device *device,
12942 camera3_capture_request_t *request)
12943{
12944 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012945 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012946 QCamera3HardwareInterface *hw =
12947 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12948 if (!hw) {
12949 LOGE("NULL camera device");
12950 return -EINVAL;
12951 }
12952
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012953 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012954 LOGD("X");
12955 return rc;
12956}
12957
12958/*===========================================================================
12959 * FUNCTION : dump
12960 *
12961 * DESCRIPTION: Dump HAL state and debug information to the given file descriptor
12962 *
12963 * PARAMETERS :
12964 *
12965 *
12966 * RETURN :
12967 *==========================================================================*/
12968
12969void QCamera3HardwareInterface::dump(
12970 const struct camera3_device *device, int fd)
12971{
12972 /* Log level property is read when "adb shell dumpsys media.camera" is
12973 called so that the log level can be controlled without restarting
12974 the media server */
12975 getLogLevel();
12976
12977 LOGD("E");
12978 QCamera3HardwareInterface *hw =
12979 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12980 if (!hw) {
12981 LOGE("NULL camera device");
12982 return;
12983 }
12984
12985 hw->dump(fd);
12986 LOGD("X");
12987 return;
12988}
12989
12990/*===========================================================================
12991 * FUNCTION : flush
12992 *
12993 * DESCRIPTION: Flush all in-flight captures and return the device to an idle state
12994 *
12995 * PARAMETERS :
12996 *
12997 *
12998 * RETURN :
12999 *==========================================================================*/
13000
13001int QCamera3HardwareInterface::flush(
13002 const struct camera3_device *device)
13003{
13004 int rc;
13005 LOGD("E");
13006 QCamera3HardwareInterface *hw =
13007 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13008 if (!hw) {
13009 LOGE("NULL camera device");
13010 return -EINVAL;
13011 }
13012
13013 pthread_mutex_lock(&hw->mMutex);
13014 // Validate current state
13015 switch (hw->mState) {
13016 case STARTED:
13017 /* valid state */
13018 break;
13019
13020 case ERROR:
13021 pthread_mutex_unlock(&hw->mMutex);
13022 hw->handleCameraDeviceError();
13023 return -ENODEV;
13024
13025 default:
13026 LOGI("Flush returned during state %d", hw->mState);
13027 pthread_mutex_unlock(&hw->mMutex);
13028 return 0;
13029 }
13030 pthread_mutex_unlock(&hw->mMutex);
13031
13032 rc = hw->flush(true /* restart channels */ );
13033 LOGD("X");
13034 return rc;
13035}
13036
13037/*===========================================================================
13038 * FUNCTION : close_camera_device
13039 *
13040 * DESCRIPTION: Close the camera device and release the HAL instance
13041 *
13042 * PARAMETERS :
13043 *
13044 *
13045 * RETURN :
13046 *==========================================================================*/
13047int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13048{
13049 int ret = NO_ERROR;
13050 QCamera3HardwareInterface *hw =
13051 reinterpret_cast<QCamera3HardwareInterface *>(
13052 reinterpret_cast<camera3_device_t *>(device)->priv);
13053 if (!hw) {
13054 LOGE("NULL camera device");
13055 return BAD_VALUE;
13056 }
13057
13058 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13059 delete hw;
13060 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013061 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013062 return ret;
13063}
13064
13065/*===========================================================================
13066 * FUNCTION : getWaveletDenoiseProcessPlate
13067 *
13068 * DESCRIPTION: query wavelet denoise process plate
13069 *
13070 * PARAMETERS : None
13071 *
13072 * RETURN : WNR process plate value
13073 *==========================================================================*/
13074cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13075{
13076 char prop[PROPERTY_VALUE_MAX];
13077 memset(prop, 0, sizeof(prop));
13078 property_get("persist.denoise.process.plates", prop, "0");
13079 int processPlate = atoi(prop);
13080 switch(processPlate) {
13081 case 0:
13082 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13083 case 1:
13084 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13085 case 2:
13086 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13087 case 3:
13088 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13089 default:
13090 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13091 }
13092}
13093
13094
13095/*===========================================================================
13096 * FUNCTION : getTemporalDenoiseProcessPlate
13097 *
13098 * DESCRIPTION: query temporal denoise process plate
13099 *
13100 * PARAMETERS : None
13101 *
13102 * RETURN : TNR process plate value
13103 *==========================================================================*/
13104cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13105{
13106 char prop[PROPERTY_VALUE_MAX];
13107 memset(prop, 0, sizeof(prop));
13108 property_get("persist.tnr.process.plates", prop, "0");
13109 int processPlate = atoi(prop);
13110 switch(processPlate) {
13111 case 0:
13112 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13113 case 1:
13114 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13115 case 2:
13116 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13117 case 3:
13118 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13119 default:
13120 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13121 }
13122}
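/* Illustrative sketch (not part of the original HAL source): the persist
 * property pattern used by the two process-plate helpers above and by other
 * tuning knobs in this file - read the property as a string, convert it to an
 * integer, and fall back to a default for out-of-range values. The helper
 * name and its range arguments are assumptions for the sketch; the property
 * key passed in would be something like "persist.tnr.process.plates". */
static int exampleReadIntProperty(const char *name, int defaultValue,
        int minValue, int maxValue)
{
    char prop[PROPERTY_VALUE_MAX];
    char def[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    snprintf(def, sizeof(def), "%d", defaultValue);
    property_get(name, prop, def);
    int value = atoi(prop);
    if (value < minValue || value > maxValue) {
        value = defaultValue;   // unknown values fall back, like the switch default above
    }
    return value;
}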
13123
13124
13125/*===========================================================================
13126 * FUNCTION : extractSceneMode
13127 *
13128 * DESCRIPTION: Extract scene mode from frameworks set metadata
13129 *
13130 * PARAMETERS :
13131 * @frame_settings: CameraMetadata reference
13132 * @metaMode: ANDROID_CONTROL_MODE
13133 * @hal_metadata: hal metadata structure
13134 *
13135 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13136 *==========================================================================*/
13137int32_t QCamera3HardwareInterface::extractSceneMode(
13138 const CameraMetadata &frame_settings, uint8_t metaMode,
13139 metadata_buffer_t *hal_metadata)
13140{
13141 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013142 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13143
13144 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13145 LOGD("Ignoring control mode OFF_KEEP_STATE");
13146 return NO_ERROR;
13147 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013148
13149 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13150 camera_metadata_ro_entry entry =
13151 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13152 if (0 == entry.count)
13153 return rc;
13154
13155 uint8_t fwk_sceneMode = entry.data.u8[0];
13156
13157 int val = lookupHalName(SCENE_MODES_MAP,
13158 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13159 fwk_sceneMode);
13160 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013161 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013162 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013163 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013164 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013165
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013166 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13167 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13168 }
13169
13170 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13171 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013172 cam_hdr_param_t hdr_params;
13173 hdr_params.hdr_enable = 1;
13174 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13175 hdr_params.hdr_need_1x = false;
13176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13177 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13178 rc = BAD_VALUE;
13179 }
13180 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013181
Thierry Strudel3d639192016-09-09 11:52:26 -070013182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13183 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13184 rc = BAD_VALUE;
13185 }
13186 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013187
13188 if (mForceHdrSnapshot) {
13189 cam_hdr_param_t hdr_params;
13190 hdr_params.hdr_enable = 1;
13191 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13192 hdr_params.hdr_need_1x = false;
13193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13194 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13195 rc = BAD_VALUE;
13196 }
13197 }
13198
Thierry Strudel3d639192016-09-09 11:52:26 -070013199 return rc;
13200}
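/* Illustrative sketch (not part of the original HAL source): the
 * framework-side settings that drive extractSceneMode() above. A capture
 * request that selects the HDR scene mode carries control mode
 * USE_SCENE_MODE plus the scene mode itself; the HAL then maps it through
 * SCENE_MODES_MAP and, for HDR, programs the bracketing parameters. */
static void exampleRequestHdrSceneMode(CameraMetadata &settings)
{
    uint8_t controlMode = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
    uint8_t sceneMode   = ANDROID_CONTROL_SCENE_MODE_HDR;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
}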
13201
13202/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013203 * FUNCTION : setVideoHdrMode
13204 *
13205 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13206 *
13207 * PARAMETERS :
13208 * @hal_metadata: hal metadata structure
13209 * @vhdr: requested QCAMERA3_VIDEO_HDR_MODE value
13210 *
13211 * RETURN : int32_t type of status (NO_ERROR on success)
13212 *==========================================================================*/
13213int32_t QCamera3HardwareInterface::setVideoHdrMode(
13214 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13215{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013216 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13217 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13218 }
13219
13220 LOGE("Invalid Video HDR mode %d!", vhdr);
13221 return BAD_VALUE;
13222}
13223
13224/*===========================================================================
13225 * FUNCTION : setSensorHDR
13226 *
13227 * DESCRIPTION: Enable/disable sensor HDR.
13228 *
13229 * PARAMETERS :
13230 * @hal_metadata: hal metadata structure
13231 * @enable: boolean whether to enable/disable sensor HDR
13232 * @isVideoHdrEnable: true when invoked from the video HDR control path
13233 * RETURN : int32_t type of status (NO_ERROR on success)
13234 *==========================================================================*/
13235int32_t QCamera3HardwareInterface::setSensorHDR(
13236 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13237{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013238 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013239 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13240
13241 if (enable) {
13242 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13243 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13244 #ifdef _LE_CAMERA_
13245 //Default to staggered HDR for IOT
13246 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13247 #else
13248 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13249 #endif
13250 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13251 }
13252
13253 bool isSupported = false;
13254 switch (sensor_hdr) {
13255 case CAM_SENSOR_HDR_IN_SENSOR:
13256 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13257 CAM_QCOM_FEATURE_SENSOR_HDR) {
13258 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013259 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013260 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013261 break;
13262 case CAM_SENSOR_HDR_ZIGZAG:
13263 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13264 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13265 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013266 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013267 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013268 break;
13269 case CAM_SENSOR_HDR_STAGGERED:
13270 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13271 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13272 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013273 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013274 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013275 break;
13276 case CAM_SENSOR_HDR_OFF:
13277 isSupported = true;
13278 LOGD("Turning off sensor HDR");
13279 break;
13280 default:
13281 LOGE("HDR mode %d not supported", sensor_hdr);
13282 rc = BAD_VALUE;
13283 break;
13284 }
13285
13286 if(isSupported) {
13287 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13288 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13289 rc = BAD_VALUE;
13290 } else {
13291 if(!isVideoHdrEnable)
13292 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013293 }
13294 }
13295 return rc;
13296}
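
/*
 * Illustrative sketch (editorial, not part of the build): the sensor HDR type
 * is selected through a system property and only applied when the matching
 * capability bit is advertised, mirroring the gating above. The numeric
 * values are assumed to follow cam_sensor_hdr_type_t.
 *
 *   adb shell setprop persist.camera.sensor.hdr 3   # staggered HDR
 *   adb shell setprop persist.camera.sensor.hdr 0   # sensor HDR off
 *
 * With the property set, setSensorHDR(hal_metadata, true) checks
 * qcom_supported_feature_mask (e.g. CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR)
 * before writing CAM_INTF_PARM_SENSOR_HDR into the parameter batch.
 */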
13297
13298/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013299 * FUNCTION : needRotationReprocess
13300 *
13301 * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
13302 *
13303 * PARAMETERS : none
13304 *
13305 * RETURN : true: needed
13306 * false: no need
13307 *==========================================================================*/
13308bool QCamera3HardwareInterface::needRotationReprocess()
13309{
13310 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13311 // pp has the capability to process rotation, so rotation is handled in reprocess
13312 LOGH("need do reprocess for rotation");
13313 return true;
13314 }
13315
13316 return false;
13317}
13318
13319/*===========================================================================
13320 * FUNCTION : needReprocess
13321 *
13322 * DESCRIPTION: check whether reprocess is needed
13323 *
13324 * PARAMETERS : none
13325 *
13326 * RETURN : true: needed
13327 * false: no need
13328 *==========================================================================*/
13329bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13330{
13331 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13332 // TODO: add for ZSL HDR later
13333 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13334 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13335 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13336 return true;
13337 } else {
13338 LOGH("already post processed frame");
13339 return false;
13340 }
13341 }
13342 return needRotationReprocess();
13343}
13344
13345/*===========================================================================
13346 * FUNCTION : needJpegExifRotation
13347 *
13348 * DESCRIPTION: check whether JPEG EXIF rotation is needed
13349 *
13350 * PARAMETERS : none
13351 *
13352 * RETURN : true: needed
13353 * false: no need
13354 *==========================================================================*/
13355bool QCamera3HardwareInterface::needJpegExifRotation()
13356{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013357 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013358 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13359 LOGD("Need use Jpeg EXIF Rotation");
13360 return true;
13361 }
13362 return false;
13363}
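
/*
 * Illustrative sketch (editorial, not part of the build): how a caller would
 * combine the two helpers above to decide where rotation happens.
 *
 *   if (needJpegExifRotation()) {
 *       // CPP cannot rotate: encode upright and record the requested
 *       // orientation in the EXIF header instead.
 *   } else if (needRotationReprocess()) {
 *       // CPP supports rotation: apply it in the reprocess pass.
 *   }
 */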
13364
13365/*===========================================================================
13366 * FUNCTION : addOfflineReprocChannel
13367 *
13368 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13369 * coming from input channel
13370 *
13371 * PARAMETERS :
13372 * @config : reprocess configuration
13373 * @inputChHandle : pointer to the input (source) channel
13374 *
13375 *
13376 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13377 *==========================================================================*/
13378QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13379 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13380{
13381 int32_t rc = NO_ERROR;
13382 QCamera3ReprocessChannel *pChannel = NULL;
13383
13384 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013385 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13386 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013387 if (NULL == pChannel) {
13388 LOGE("no mem for reprocess channel");
13389 return NULL;
13390 }
13391
13392 rc = pChannel->initialize(IS_TYPE_NONE);
13393 if (rc != NO_ERROR) {
13394 LOGE("init reprocess channel failed, ret = %d", rc);
13395 delete pChannel;
13396 return NULL;
13397 }
13398
13399 // pp feature config
13400 cam_pp_feature_config_t pp_config;
13401 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13402
13403 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13404 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13405 & CAM_QCOM_FEATURE_DSDN) {
13406 // Prefer CPP DSDN over CDS when the hardware supports it.
13407 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13408 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13409 }
13410 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13411 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13412 }
13413
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013414 if (config.hdr_param.hdr_enable) {
13415 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13416 pp_config.hdr_param = config.hdr_param;
13417 }
13418
13419 if (mForceHdrSnapshot) {
13420 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13421 pp_config.hdr_param.hdr_enable = 1;
13422 pp_config.hdr_param.hdr_need_1x = 0;
13423 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13424 }
13425
Thierry Strudel3d639192016-09-09 11:52:26 -070013426 rc = pChannel->addReprocStreamsFromSource(pp_config,
13427 config,
13428 IS_TYPE_NONE,
13429 mMetadataChannel);
13430
13431 if (rc != NO_ERROR) {
13432 delete pChannel;
13433 return NULL;
13434 }
13435 return pChannel;
13436}
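
/*
 * Illustrative sketch (editorial, not part of the build): typical call site
 * for addOfflineReprocChannel(), assuming 'config' was already filled by the
 * snapshot/postproc path and 'inputChannel' is the source processing channel.
 *
 *   QCamera3ReprocessChannel *reproc =
 *           addOfflineReprocChannel(config, inputChannel);
 *   if (reproc == NULL) {
 *       LOGE("Failed to create offline reprocess channel");
 *       return;
 *   }
 *   // The returned channel is already initialized and has its reprocess
 *   // streams added from the source channel.
 */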
13437
13438/*===========================================================================
13439 * FUNCTION : getMobicatMask
13440 *
13441 * DESCRIPTION: returns mobicat mask
13442 *
13443 * PARAMETERS : none
13444 *
13445 * RETURN : mobicat mask
13446 *
13447 *==========================================================================*/
13448uint8_t QCamera3HardwareInterface::getMobicatMask()
13449{
13450 return m_MobicatMask;
13451}
13452
13453/*===========================================================================
13454 * FUNCTION : setMobicat
13455 *
13456 * DESCRIPTION: set Mobicat on/off.
13457 *
13458 * PARAMETERS :
13459 * @params : none
13460 *
13461 * RETURN : int32_t type of status
13462 * NO_ERROR -- success
13463 * non-zero failure code
13464 *==========================================================================*/
13465int32_t QCamera3HardwareInterface::setMobicat()
13466{
13467 char value [PROPERTY_VALUE_MAX];
13468 property_get("persist.camera.mobicat", value, "0");
13469 int32_t ret = NO_ERROR;
13470 uint8_t enableMobi = (uint8_t)atoi(value);
13471
13472 if (enableMobi) {
13473 tune_cmd_t tune_cmd;
13474 tune_cmd.type = SET_RELOAD_CHROMATIX;
13475 tune_cmd.module = MODULE_ALL;
13476 tune_cmd.value = TRUE;
13477 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13478 CAM_INTF_PARM_SET_VFE_COMMAND,
13479 tune_cmd);
13480
13481 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13482 CAM_INTF_PARM_SET_PP_COMMAND,
13483 tune_cmd);
13484 }
13485 m_MobicatMask = enableMobi;
13486
13487 return ret;
13488}
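
/*
 * Illustrative sketch (editorial, not part of the build): Mobicat is gated
 * purely by a system property, so it is typically toggled from the shell
 * before the camera is opened.
 *
 *   adb shell setprop persist.camera.mobicat 1   # enable
 *   adb shell setprop persist.camera.mobicat 0   # disable (default)
 *
 * When enabled, setMobicat() sends SET_RELOAD_CHROMATIX to both the VFE and
 * PP modules and records the value in m_MobicatMask.
 */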
13489
13490/*===========================================================================
13491* FUNCTION : getLogLevel
13492*
13493* DESCRIPTION: Reads the log level property into a variable
13494*
13495* PARAMETERS :
13496* None
13497*
13498* RETURN :
13499* None
13500*==========================================================================*/
13501void QCamera3HardwareInterface::getLogLevel()
13502{
13503 char prop[PROPERTY_VALUE_MAX];
13504 uint32_t globalLogLevel = 0;
13505
13506 property_get("persist.camera.hal.debug", prop, "0");
13507 int val = atoi(prop);
13508 if (0 <= val) {
13509 gCamHal3LogLevel = (uint32_t)val;
13510 }
13511
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013512 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013513 gKpiDebugLevel = atoi(prop);
13514
13515 property_get("persist.camera.global.debug", prop, "0");
13516 val = atoi(prop);
13517 if (0 <= val) {
13518 globalLogLevel = (uint32_t)val;
13519 }
13520
13521 /* The higher of persist.camera.hal.debug and persist.camera.global.debug is selected */
13522 if (gCamHal3LogLevel < globalLogLevel)
13523 gCamHal3LogLevel = globalLogLevel;
13524
13525 return;
13526}
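
/*
 * Illustrative sketch (editorial, not part of the build): the effective HAL
 * log level is the higher of the two properties read above.
 *
 *   adb shell setprop persist.camera.hal.debug 2
 *   adb shell setprop persist.camera.global.debug 4
 *   # gCamHal3LogLevel becomes 4 (the global level wins because it is higher)
 */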
13527
13528/*===========================================================================
13529 * FUNCTION : validateStreamRotations
13530 *
13531 * DESCRIPTION: Check if the rotations requested are supported
13532 *
13533 * PARAMETERS :
13534 * @stream_list : streams to be configured
13535 *
13536 * RETURN : NO_ERROR on success
13537 * -EINVAL on failure
13538 *
13539 *==========================================================================*/
13540int QCamera3HardwareInterface::validateStreamRotations(
13541 camera3_stream_configuration_t *streamList)
13542{
13543 int rc = NO_ERROR;
13544
13545 /*
13546 * Loop through all streams requested in configuration
13547 * Check if unsupported rotations have been requested on any of them
13548 */
13549 for (size_t j = 0; j < streamList->num_streams; j++){
13550 camera3_stream_t *newStream = streamList->streams[j];
13551
13552 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13553 bool isImplDef = (newStream->format ==
13554 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13555 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13556 isImplDef);
13557
13558 if (isRotated && (!isImplDef || isZsl)) {
13559 LOGE("Error: Unsupported rotation of %d requested for stream "
13560 "type:%d and stream format:%d",
13561 newStream->rotation, newStream->stream_type,
13562 newStream->format);
13563 rc = -EINVAL;
13564 break;
13565 }
13566 }
13567
13568 return rc;
13569}
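
/*
 * Illustrative sketch (editorial, not part of the build): a configuration
 * that validateStreamRotations() rejects, since rotation is only honored on
 * non-ZSL implementation-defined streams.
 *
 *   camera3_stream_t blobStream = {};
 *   blobStream.stream_type = CAMERA3_STREAM_OUTPUT;
 *   blobStream.format      = HAL_PIXEL_FORMAT_BLOB;
 *   blobStream.rotation    = CAMERA3_STREAM_ROTATION_90;
 *   // A stream list containing blobStream makes the function return -EINVAL.
 */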
13570
13571/*===========================================================================
13572* FUNCTION : getFlashInfo
13573*
13574* DESCRIPTION: Retrieve information about whether the device has a flash.
13575*
13576* PARAMETERS :
13577* @cameraId : Camera id to query
13578* @hasFlash : Boolean indicating whether there is a flash device
13579* associated with given camera
13580* @flashNode : If a flash device exists, this will be its device node.
13581*
13582* RETURN :
13583* None
13584*==========================================================================*/
13585void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13586 bool& hasFlash,
13587 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13588{
13589 cam_capability_t* camCapability = gCamCapability[cameraId];
13590 if (NULL == camCapability) {
13591 hasFlash = false;
13592 flashNode[0] = '\0';
13593 } else {
13594 hasFlash = camCapability->flash_available;
13595 strlcpy(flashNode,
13596 (char*)camCapability->flash_dev_name,
13597 QCAMERA_MAX_FILEPATH_LENGTH);
13598 }
13599}
13600
13601/*===========================================================================
13602* FUNCTION : getEepromVersionInfo
13603*
13604* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13605*
13606* PARAMETERS : None
13607*
13608* RETURN : string describing EEPROM version
13609* "\0" if no such info available
13610*==========================================================================*/
13611const char *QCamera3HardwareInterface::getEepromVersionInfo()
13612{
13613 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13614}
13615
13616/*===========================================================================
13617* FUNCTION : getLdafCalib
13618*
13619* DESCRIPTION: Retrieve Laser AF calibration data
13620*
13621* PARAMETERS : None
13622*
13623 * RETURN : Pointer to two uint32_t values holding laser AF calibration data
13624* NULL if none is available.
13625*==========================================================================*/
13626const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13627{
13628 if (mLdafCalibExist) {
13629 return &mLdafCalib[0];
13630 } else {
13631 return NULL;
13632 }
13633}
13634
13635/*===========================================================================
13636 * FUNCTION : dynamicUpdateMetaStreamInfo
13637 *
13638 * DESCRIPTION: This function:
13639 * (1) stops all the channels
13640 * (2) returns error on pending requests and buffers
13641 * (3) sends metastream_info in setparams
13642 * (4) starts all channels
13643 * This is useful when sensor has to be restarted to apply any
13644 * settings such as frame rate from a different sensor mode
13645 *
13646 * PARAMETERS : None
13647 *
13648 * RETURN : NO_ERROR on success
13649 * Error codes on failure
13650 *
13651 *==========================================================================*/
13652int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13653{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013654 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013655 int rc = NO_ERROR;
13656
13657 LOGD("E");
13658
13659 rc = stopAllChannels();
13660 if (rc < 0) {
13661 LOGE("stopAllChannels failed");
13662 return rc;
13663 }
13664
13665 rc = notifyErrorForPendingRequests();
13666 if (rc < 0) {
13667 LOGE("notifyErrorForPendingRequests failed");
13668 return rc;
13669 }
13670
13671 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13672 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13673 "Format:%d",
13674 mStreamConfigInfo.type[i],
13675 mStreamConfigInfo.stream_sizes[i].width,
13676 mStreamConfigInfo.stream_sizes[i].height,
13677 mStreamConfigInfo.postprocess_mask[i],
13678 mStreamConfigInfo.format[i]);
13679 }
13680
13681 /* Send meta stream info once again so that ISP can start */
13682 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13683 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13684 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13685 mParameters);
13686 if (rc < 0) {
13687 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13688 }
13689
13690 rc = startAllChannels();
13691 if (rc < 0) {
13692 LOGE("startAllChannels failed");
13693 return rc;
13694 }
13695
13696 LOGD("X");
13697 return rc;
13698}
13699
13700/*===========================================================================
13701 * FUNCTION : stopAllChannels
13702 *
13703 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13704 *
13705 * PARAMETERS : None
13706 *
13707 * RETURN : NO_ERROR on success
13708 * Error codes on failure
13709 *
13710 *==========================================================================*/
13711int32_t QCamera3HardwareInterface::stopAllChannels()
13712{
13713 int32_t rc = NO_ERROR;
13714
13715 LOGD("Stopping all channels");
13716 // Stop the Streams/Channels
13717 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13718 it != mStreamInfo.end(); it++) {
13719 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13720 if (channel) {
13721 channel->stop();
13722 }
13723 (*it)->status = INVALID;
13724 }
13725
13726 if (mSupportChannel) {
13727 mSupportChannel->stop();
13728 }
13729 if (mAnalysisChannel) {
13730 mAnalysisChannel->stop();
13731 }
13732 if (mRawDumpChannel) {
13733 mRawDumpChannel->stop();
13734 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013735 if (mHdrPlusRawSrcChannel) {
13736 mHdrPlusRawSrcChannel->stop();
13737 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013738 if (mMetadataChannel) {
13739 /* If mStreamInfo is not empty, the metadata stream exists */
13740 mMetadataChannel->stop();
13741 }
13742
13743 LOGD("All channels stopped");
13744 return rc;
13745}
13746
13747/*===========================================================================
13748 * FUNCTION : startAllChannels
13749 *
13750 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13751 *
13752 * PARAMETERS : None
13753 *
13754 * RETURN : NO_ERROR on success
13755 * Error codes on failure
13756 *
13757 *==========================================================================*/
13758int32_t QCamera3HardwareInterface::startAllChannels()
13759{
13760 int32_t rc = NO_ERROR;
13761
13762 LOGD("Start all channels ");
13763 // Start the Streams/Channels
13764 if (mMetadataChannel) {
13765 /* If mStreamInfo is not empty, the metadata stream exists */
13766 rc = mMetadataChannel->start();
13767 if (rc < 0) {
13768 LOGE("META channel start failed");
13769 return rc;
13770 }
13771 }
13772 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13773 it != mStreamInfo.end(); it++) {
13774 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13775 if (channel) {
13776 rc = channel->start();
13777 if (rc < 0) {
13778 LOGE("channel start failed");
13779 return rc;
13780 }
13781 }
13782 }
13783 if (mAnalysisChannel) {
13784 mAnalysisChannel->start();
13785 }
13786 if (mSupportChannel) {
13787 rc = mSupportChannel->start();
13788 if (rc < 0) {
13789 LOGE("Support channel start failed");
13790 return rc;
13791 }
13792 }
13793 if (mRawDumpChannel) {
13794 rc = mRawDumpChannel->start();
13795 if (rc < 0) {
13796 LOGE("RAW dump channel start failed");
13797 return rc;
13798 }
13799 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013800 if (mHdrPlusRawSrcChannel) {
13801 rc = mHdrPlusRawSrcChannel->start();
13802 if (rc < 0) {
13803 LOGE("HDR+ RAW channel start failed");
13804 return rc;
13805 }
13806 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013807
13808 LOGD("All channels started");
13809 return rc;
13810}
13811
13812/*===========================================================================
13813 * FUNCTION : notifyErrorForPendingRequests
13814 *
13815 * DESCRIPTION: This function sends error for all the pending requests/buffers
13816 *
13817 * PARAMETERS : None
13818 *
13819 * RETURN : Error codes
13820 * NO_ERROR on success
13821 *
13822 *==========================================================================*/
13823int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13824{
13825 int32_t rc = NO_ERROR;
13826 unsigned int frameNum = 0;
13827 camera3_capture_result_t result;
13828 camera3_stream_buffer_t *pStream_Buf = NULL;
13829
13830 memset(&result, 0, sizeof(camera3_capture_result_t));
13831
13832 if (mPendingRequestsList.size() > 0) {
13833 pendingRequestIterator i = mPendingRequestsList.begin();
13834 frameNum = i->frame_number;
13835 } else {
13836 /* There might still be pending buffers even though there are
13837 no pending requests. Setting the frameNum to MAX so that
13838 all the buffers with smaller frame numbers are returned */
13839 frameNum = UINT_MAX;
13840 }
13841
13842 LOGH("Oldest frame num on mPendingRequestsList = %u",
13843 frameNum);
13844
Emilian Peev7650c122017-01-19 08:24:33 -080013845 notifyErrorFoPendingDepthData(mDepthChannel);
13846
Thierry Strudel3d639192016-09-09 11:52:26 -070013847 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13848 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13849
13850 if (req->frame_number < frameNum) {
13851 // Send Error notify to frameworks for each buffer for which
13852 // metadata buffer is already sent
13853 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13854 req->frame_number, req->mPendingBufferList.size());
13855
13856 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13857 if (NULL == pStream_Buf) {
13858 LOGE("No memory for pending buffers array");
13859 return NO_MEMORY;
13860 }
13861 memset(pStream_Buf, 0,
13862 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13863 result.result = NULL;
13864 result.frame_number = req->frame_number;
13865 result.num_output_buffers = req->mPendingBufferList.size();
13866 result.output_buffers = pStream_Buf;
13867
13868 size_t index = 0;
13869 for (auto info = req->mPendingBufferList.begin();
13870 info != req->mPendingBufferList.end(); ) {
13871
13872 camera3_notify_msg_t notify_msg;
13873 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13874 notify_msg.type = CAMERA3_MSG_ERROR;
13875 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13876 notify_msg.message.error.error_stream = info->stream;
13877 notify_msg.message.error.frame_number = req->frame_number;
13878 pStream_Buf[index].acquire_fence = -1;
13879 pStream_Buf[index].release_fence = -1;
13880 pStream_Buf[index].buffer = info->buffer;
13881 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13882 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013883 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013884 index++;
13885 // Remove buffer from list
13886 info = req->mPendingBufferList.erase(info);
13887 }
13888
13889 // Remove this request from Map
13890 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13891 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13892 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13893
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013894 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013895
13896 delete [] pStream_Buf;
13897 } else {
13898
13899 // Go through the pending requests info and send error request to framework
13900 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13901
13902 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13903
13904 // Send error notify to frameworks
13905 camera3_notify_msg_t notify_msg;
13906 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13907 notify_msg.type = CAMERA3_MSG_ERROR;
13908 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13909 notify_msg.message.error.error_stream = NULL;
13910 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013911 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013912
13913 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13914 if (NULL == pStream_Buf) {
13915 LOGE("No memory for pending buffers array");
13916 return NO_MEMORY;
13917 }
13918 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13919
13920 result.result = NULL;
13921 result.frame_number = req->frame_number;
13922 result.input_buffer = i->input_buffer;
13923 result.num_output_buffers = req->mPendingBufferList.size();
13924 result.output_buffers = pStream_Buf;
13925
13926 size_t index = 0;
13927 for (auto info = req->mPendingBufferList.begin();
13928 info != req->mPendingBufferList.end(); ) {
13929 pStream_Buf[index].acquire_fence = -1;
13930 pStream_Buf[index].release_fence = -1;
13931 pStream_Buf[index].buffer = info->buffer;
13932 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13933 pStream_Buf[index].stream = info->stream;
13934 index++;
13935 // Remove buffer from list
13936 info = req->mPendingBufferList.erase(info);
13937 }
13938
13939 // Remove this request from Map
13940 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13941 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13942 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13943
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013944 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013945 delete [] pStream_Buf;
13946 i = erasePendingRequest(i);
13947 }
13948 }
13949
13950 /* Reset pending frame Drop list and requests list */
13951 mPendingFrameDropList.clear();
13952
13953 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13954 req.mPendingBufferList.clear();
13955 }
13956 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013957 LOGH("Cleared all the pending buffers ");
13958
13959 return rc;
13960}
13961
13962bool QCamera3HardwareInterface::isOnEncoder(
13963 const cam_dimension_t max_viewfinder_size,
13964 uint32_t width, uint32_t height)
13965{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013966 return ((width > (uint32_t)max_viewfinder_size.width) ||
13967 (height > (uint32_t)max_viewfinder_size.height) ||
13968 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13969 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013970}
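
/*
 * Illustrative sketch (editorial, not part of the build): isOnEncoder()
 * routes a stream to the encoder path when it exceeds the maximum viewfinder
 * size or 4K in either dimension. Assuming max_viewfinder_size = 1920x1080:
 *
 *   isOnEncoder(max_viewfinder_size, 1280,  720);   // false, viewfinder path
 *   isOnEncoder(max_viewfinder_size, 3840, 2160);   // true, encoder path
 */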
13971
13972/*===========================================================================
13973 * FUNCTION : setBundleInfo
13974 *
13975 * DESCRIPTION: Set bundle info for all streams that are bundle.
13976 *
13977 * PARAMETERS : None
13978 *
13979 * RETURN : NO_ERROR on success
13980 * Error codes on failure
13981 *==========================================================================*/
13982int32_t QCamera3HardwareInterface::setBundleInfo()
13983{
13984 int32_t rc = NO_ERROR;
13985
13986 if (mChannelHandle) {
13987 cam_bundle_config_t bundleInfo;
13988 memset(&bundleInfo, 0, sizeof(bundleInfo));
13989 rc = mCameraHandle->ops->get_bundle_info(
13990 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13991 if (rc != NO_ERROR) {
13992 LOGE("get_bundle_info failed");
13993 return rc;
13994 }
13995 if (mAnalysisChannel) {
13996 mAnalysisChannel->setBundleInfo(bundleInfo);
13997 }
13998 if (mSupportChannel) {
13999 mSupportChannel->setBundleInfo(bundleInfo);
14000 }
14001 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14002 it != mStreamInfo.end(); it++) {
14003 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14004 channel->setBundleInfo(bundleInfo);
14005 }
14006 if (mRawDumpChannel) {
14007 mRawDumpChannel->setBundleInfo(bundleInfo);
14008 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014009 if (mHdrPlusRawSrcChannel) {
14010 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14011 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014012 }
14013
14014 return rc;
14015}
14016
14017/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014018 * FUNCTION : setInstantAEC
14019 *
14020 * DESCRIPTION: Set Instant AEC related params.
14021 *
14022 * PARAMETERS :
14023 * @meta: CameraMetadata reference
14024 *
14025 * RETURN : NO_ERROR on success
14026 * Error codes on failure
14027 *==========================================================================*/
14028int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14029{
14030 int32_t rc = NO_ERROR;
14031 uint8_t val = 0;
14032 char prop[PROPERTY_VALUE_MAX];
14033
14034 // First try to configure instant AEC from framework metadata
14035 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14036 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14037 }
14038
14039 // If framework did not set this value, try to read from set prop.
14040 if (val == 0) {
14041 memset(prop, 0, sizeof(prop));
14042 property_get("persist.camera.instant.aec", prop, "0");
14043 val = (uint8_t)atoi(prop);
14044 }
14045
14046 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14047 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14048 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14049 mInstantAEC = val;
14050 mInstantAECSettledFrameNumber = 0;
14051 mInstantAecFrameIdxCount = 0;
14052 LOGH("instantAEC value set %d",val);
14053 if (mInstantAEC) {
14054 memset(prop, 0, sizeof(prop));
14055 property_get("persist.camera.ae.instant.bound", prop, "10");
14056 int32_t aec_frame_skip_cnt = atoi(prop);
14057 if (aec_frame_skip_cnt >= 0) {
14058 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14059 } else {
14060 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14061 rc = BAD_VALUE;
14062 }
14063 }
14064 } else {
14065 LOGE("Bad instant aec value set %d", val);
14066 rc = BAD_VALUE;
14067 }
14068 return rc;
14069}
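
/*
 * Illustrative sketch (editorial, not part of the build): instant AEC is taken
 * from the QCAMERA3_INSTANT_AEC_MODE vendor tag when the framework sets it,
 * and falls back to system properties otherwise. Assuming aggressive
 * convergence maps to value 1 in cam_aec_convergence_type:
 *
 *   adb shell setprop persist.camera.instant.aec 1       # convergence mode
 *   adb shell setprop persist.camera.ae.instant.bound 10 # frames to skip
 */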
14070
14071/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014072 * FUNCTION : get_num_overall_buffers
14073 *
14074 * DESCRIPTION: Compute the total number of pending buffers across all requests.
14075 *
14076 * PARAMETERS : None
14077 *
14078 * RETURN : Number of overall pending buffers
14079 *
14080 *==========================================================================*/
14081uint32_t PendingBuffersMap::get_num_overall_buffers()
14082{
14083 uint32_t sum_buffers = 0;
14084 for (auto &req : mPendingBuffersInRequest) {
14085 sum_buffers += req.mPendingBufferList.size();
14086 }
14087 return sum_buffers;
14088}
14089
14090/*===========================================================================
14091 * FUNCTION : removeBuf
14092 *
14093 * DESCRIPTION: Remove a matching buffer from tracker.
14094 *
14095 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14096 *
14097 * RETURN : None
14098 *
14099 *==========================================================================*/
14100void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14101{
14102 bool buffer_found = false;
14103 for (auto req = mPendingBuffersInRequest.begin();
14104 req != mPendingBuffersInRequest.end(); req++) {
14105 for (auto k = req->mPendingBufferList.begin();
14106 k != req->mPendingBufferList.end(); k++ ) {
14107 if (k->buffer == buffer) {
14108 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14109 req->frame_number, buffer);
14110 k = req->mPendingBufferList.erase(k);
14111 if (req->mPendingBufferList.empty()) {
14112 // Remove this request from Map
14113 req = mPendingBuffersInRequest.erase(req);
14114 }
14115 buffer_found = true;
14116 break;
14117 }
14118 }
14119 if (buffer_found) {
14120 break;
14121 }
14122 }
14123 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14124 get_num_overall_buffers());
14125}
14126
14127/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014128 * FUNCTION : getBufErrStatus
14129 *
14130 * DESCRIPTION: get buffer error status
14131 *
14132 * PARAMETERS : @buffer: buffer handle
14133 *
14134 * RETURN : Error status
14135 *
14136 *==========================================================================*/
14137int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14138{
14139 for (auto& req : mPendingBuffersInRequest) {
14140 for (auto& k : req.mPendingBufferList) {
14141 if (k.buffer == buffer)
14142 return k.bufStatus;
14143 }
14144 }
14145 return CAMERA3_BUFFER_STATUS_OK;
14146}
14147
14148/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014149 * FUNCTION : setPAAFSupport
14150 *
14151 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14152 * feature mask according to stream type and filter
14153 * arrangement
14154 *
14155 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14156 * @stream_type: stream type
14157 * @filter_arrangement: filter arrangement
14158 *
14159 * RETURN : None
14160 *==========================================================================*/
14161void QCamera3HardwareInterface::setPAAFSupport(
14162 cam_feature_mask_t& feature_mask,
14163 cam_stream_type_t stream_type,
14164 cam_color_filter_arrangement_t filter_arrangement)
14165{
Thierry Strudel3d639192016-09-09 11:52:26 -070014166 switch (filter_arrangement) {
14167 case CAM_FILTER_ARRANGEMENT_RGGB:
14168 case CAM_FILTER_ARRANGEMENT_GRBG:
14169 case CAM_FILTER_ARRANGEMENT_GBRG:
14170 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014171 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14172 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014173 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014174 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14175 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014176 }
14177 break;
14178 case CAM_FILTER_ARRANGEMENT_Y:
14179 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14180 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14181 }
14182 break;
14183 default:
14184 break;
14185 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014186 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14187 feature_mask, stream_type, filter_arrangement);
14188
14189
Thierry Strudel3d639192016-09-09 11:52:26 -070014190}
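
/*
 * Illustrative sketch (editorial, not part of the build): enabling PAAF for a
 * preview stream on a Bayer sensor during stream configuration.
 *
 *   cam_feature_mask_t mask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *           gCamCapability[mCameraId]->color_arrangement);
 *   // mask now includes CAM_QCOM_FEATURE_PAAF, unless CAM_QTI_FEATURE_PPEISCORE
 *   // was already set for this stream.
 */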
14191
14192/*===========================================================================
14193* FUNCTION : getSensorMountAngle
14194*
14195* DESCRIPTION: Retrieve sensor mount angle
14196*
14197* PARAMETERS : None
14198*
14199* RETURN : sensor mount angle in uint32_t
14200*==========================================================================*/
14201uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14202{
14203 return gCamCapability[mCameraId]->sensor_mount_angle;
14204}
14205
14206/*===========================================================================
14207* FUNCTION : getRelatedCalibrationData
14208*
14209* DESCRIPTION: Retrieve related system calibration data
14210*
14211* PARAMETERS : None
14212*
14213* RETURN : Pointer of related system calibration data
14214*==========================================================================*/
14215const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14216{
14217 return (const cam_related_system_calibration_data_t *)
14218 &(gCamCapability[mCameraId]->related_cam_calibration);
14219}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014220
14221/*===========================================================================
14222 * FUNCTION : is60HzZone
14223 *
14224 * DESCRIPTION: Whether the device is in a region with 60 Hz mains electricity frequency
14225 *
14226 * PARAMETERS : None
14227 *
14228 * RETURN : True if in 60Hz zone, False otherwise
14229 *==========================================================================*/
14230bool QCamera3HardwareInterface::is60HzZone()
14231{
14232 time_t t = time(NULL);
14233 struct tm lt;
14234
14235 struct tm* r = localtime_r(&t, &lt);
14236
14237 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14238 return true;
14239 else
14240 return false;
14241}
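
/*
 * Illustrative sketch (editorial, not part of the build): the heuristic above
 * uses the UTC offset of the local time zone to pick the mains frequency that
 * drives antibanding defaults.
 *
 *   // tm_gmtoff = -8*60*60 (UTC-8, Americas)  -> true  (60 Hz)
 *   // tm_gmtoff = +9*60*60 (UTC+9, East Asia) -> true  (60 Hz)
 *   // tm_gmtoff = +1*60*60 (UTC+1, Europe)    -> false (50 Hz)
 */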
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014242
14243/*===========================================================================
14244 * FUNCTION : adjustBlackLevelForCFA
14245 *
14246 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14247 * of bayer CFA (Color Filter Array).
14248 *
14249 * PARAMETERS : @input: black level pattern in the order of RGGB
14250 * @output: black level pattern in the order of CFA
14251 * @color_arrangement: CFA color arrangement
14252 *
14253 * RETURN : None
14254 *==========================================================================*/
14255template<typename T>
14256void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14257 T input[BLACK_LEVEL_PATTERN_CNT],
14258 T output[BLACK_LEVEL_PATTERN_CNT],
14259 cam_color_filter_arrangement_t color_arrangement)
14260{
14261 switch (color_arrangement) {
14262 case CAM_FILTER_ARRANGEMENT_GRBG:
14263 output[0] = input[1];
14264 output[1] = input[0];
14265 output[2] = input[3];
14266 output[3] = input[2];
14267 break;
14268 case CAM_FILTER_ARRANGEMENT_GBRG:
14269 output[0] = input[2];
14270 output[1] = input[3];
14271 output[2] = input[0];
14272 output[3] = input[1];
14273 break;
14274 case CAM_FILTER_ARRANGEMENT_BGGR:
14275 output[0] = input[3];
14276 output[1] = input[2];
14277 output[2] = input[1];
14278 output[3] = input[0];
14279 break;
14280 case CAM_FILTER_ARRANGEMENT_RGGB:
14281 output[0] = input[0];
14282 output[1] = input[1];
14283 output[2] = input[2];
14284 output[3] = input[3];
14285 break;
14286 default:
14287 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14288 break;
14289 }
14290}
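
/*
 * Illustrative sketch (editorial, not part of the build): remapping an
 * RGGB-ordered black level pattern (R, Gr, Gb, B) for a GRBG sensor.
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {64.f, 65.f, 66.f, 67.f};
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // out == {65.f, 64.f, 67.f, 66.f}, i.e. Gr, R, B, Gb in CFA order.
 */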
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014291
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014292void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14293 CameraMetadata &resultMetadata,
14294 std::shared_ptr<metadata_buffer_t> settings)
14295{
14296 if (settings == nullptr) {
14297 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14298 return;
14299 }
14300
14301 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14302 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14303 }
14304
14305 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14306 String8 str((const char *)gps_methods);
14307 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14308 }
14309
14310 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14311 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14312 }
14313
14314 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14315 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14316 }
14317
14318 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14319 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14320 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14321 }
14322
14323 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14324 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14325 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14326 }
14327
14328 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14329 int32_t fwk_thumb_size[2];
14330 fwk_thumb_size[0] = thumb_size->width;
14331 fwk_thumb_size[1] = thumb_size->height;
14332 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14333 }
14334
14335 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14336 uint8_t fwk_intent = intent[0];
14337 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14338 }
14339}
14340
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014341bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14342 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14343 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014344{
14345 if (hdrPlusRequest == nullptr) return false;
14346
14347 // Check noise reduction mode is high quality.
14348 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14349 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14350 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014351 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14352 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014353 return false;
14354 }
14355
14356 // Check edge mode is high quality.
14357 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14358 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14359 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14360 return false;
14361 }
14362
14363 if (request.num_output_buffers != 1 ||
14364 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14365 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014366 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14367 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14368 request.output_buffers[i].stream->width,
14369 request.output_buffers[i].stream->height,
14370 request.output_buffers[i].stream->format);
14371 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014372 return false;
14373 }
14374
14375 // Get a YUV buffer from pic channel.
14376 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14377 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14378 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14379 if (res != OK) {
14380 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14381 __FUNCTION__, strerror(-res), res);
14382 return false;
14383 }
14384
14385 pbcamera::StreamBuffer buffer;
14386 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014387 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014388 buffer.data = yuvBuffer->buffer;
14389 buffer.dataSize = yuvBuffer->frame_len;
14390
14391 pbcamera::CaptureRequest pbRequest;
14392 pbRequest.id = request.frame_number;
14393 pbRequest.outputBuffers.push_back(buffer);
14394
14395 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014396 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014397 if (res != OK) {
14398 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14399 strerror(-res), res);
14400 return false;
14401 }
14402
14403 hdrPlusRequest->yuvBuffer = yuvBuffer;
14404 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14405
14406 return true;
14407}
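
/*
 * Illustrative sketch (editorial, not part of the build): a capture request
 * only takes the HDR+ path above when it asks for high-quality processing and
 * carries exactly one JPEG (BLOB) output buffer.
 *
 *   uint8_t nr   = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t edge = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   settings.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   settings.update(ANDROID_EDGE_MODE, &edge, 1);
 *   // request.num_output_buffers == 1 with a HAL_PIXEL_FORMAT_BLOB stream;
 *   // anything else falls back to the regular capture path.
 */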
14408
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014409status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14410 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14411 return OK;
14412 }
14413
14414 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14415 if (res != OK) {
14416 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14417 strerror(-res), res);
14418 return res;
14419 }
14420 gHdrPlusClientOpening = true;
14421
14422 return OK;
14423}
14424
Chien-Yu Chenee335912017-02-09 17:53:20 -080014425status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14426{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014427 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014428
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014429 // Check if gHdrPlusClient is opened or being opened.
14430 if (gHdrPlusClient == nullptr) {
14431 if (gHdrPlusClientOpening) {
14432 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14433 return OK;
14434 }
14435
14436 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014437 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014438 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14439 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014440 return res;
14441 }
14442
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014443 // When opening HDR+ client completes, HDR+ mode will be enabled.
14444 return OK;
14445
Chien-Yu Chenee335912017-02-09 17:53:20 -080014446 }
14447
14448 // Configure stream for HDR+.
14449 res = configureHdrPlusStreamsLocked();
14450 if (res != OK) {
14451 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014452 return res;
14453 }
14454
14455 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14456 res = gHdrPlusClient->setZslHdrPlusMode(true);
14457 if (res != OK) {
14458 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014459 return res;
14460 }
14461
14462 mHdrPlusModeEnabled = true;
14463 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14464
14465 return OK;
14466}
14467
14468void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14469{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014470 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014471 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014472 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14473 if (res != OK) {
14474 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14475 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014476 }
14477
14478 mHdrPlusModeEnabled = false;
14479 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14480}
14481
14482status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014483{
14484 pbcamera::InputConfiguration inputConfig;
14485 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14486 status_t res = OK;
14487
14488 // Configure HDR+ client streams.
14489 // Get input config.
14490 if (mHdrPlusRawSrcChannel) {
14491 // HDR+ input buffers will be provided by HAL.
14492 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14493 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14494 if (res != OK) {
14495 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14496 __FUNCTION__, strerror(-res), res);
14497 return res;
14498 }
14499
14500 inputConfig.isSensorInput = false;
14501 } else {
14502 // Sensor MIPI will send data to Easel.
14503 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014504 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014505 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14506 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14507 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14508 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14509 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14510 if (mSensorModeInfo.num_raw_bits != 10) {
14511 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14512 mSensorModeInfo.num_raw_bits);
14513 return BAD_VALUE;
14514 }
14515
14516 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014517 }
14518
14519 // Get output configurations.
14520 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014521 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014522
14523 // Easel may need to output YUV output buffers if mPictureChannel was created.
14524 pbcamera::StreamConfiguration yuvOutputConfig;
14525 if (mPictureChannel != nullptr) {
14526 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14527 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14528 if (res != OK) {
14529 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
14530 __FUNCTION__, strerror(-res), res);
14531
14532 return res;
14533 }
14534
14535 outputStreamConfigs.push_back(yuvOutputConfig);
14536 }
14537
14538 // TODO: consider other channels for YUV output buffers.
14539
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014540 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014541 if (res != OK) {
14542 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14543 strerror(-res), res);
14544 return res;
14545 }
14546
14547 return OK;
14548}
14549
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014550void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14551 if (client == nullptr) {
14552 ALOGE("%s: Opened client is null.", __FUNCTION__);
14553 return;
14554 }
14555
14556 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14557
14558 Mutex::Autolock l(gHdrPlusClientLock);
14559 gHdrPlusClient = std::move(client);
14560 gHdrPlusClientOpening = false;
14561
14562 // Set static metadata.
14563 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14564 if (res != OK) {
14565 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14566 __FUNCTION__, strerror(-res), res);
14567 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14568 gHdrPlusClient = nullptr;
14569 return;
14570 }
14571
14572 // Enable HDR+ mode.
14573 res = enableHdrPlusModeLocked();
14574 if (res != OK) {
14575 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14576 }
14577}
14578
14579void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14580 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14581 Mutex::Autolock l(gHdrPlusClientLock);
14582 gHdrPlusClientOpening = false;
14583}
14584
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014585void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14586 const camera_metadata_t &resultMetadata) {
14587 if (result != nullptr) {
14588 if (result->outputBuffers.size() != 1) {
14589 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14590 result->outputBuffers.size());
14591 return;
14592 }
14593
14594 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14595 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14596 result->outputBuffers[0].streamId);
14597 return;
14598 }
14599
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014600 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014601 HdrPlusPendingRequest pendingRequest;
14602 {
14603 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14604 auto req = mHdrPlusPendingRequests.find(result->requestId);
14605 pendingRequest = req->second;
14606 }
14607
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014608 // Update the result metadata with the settings of the HDR+ still capture request because
14609 // the result metadata belongs to a ZSL buffer.
14610 CameraMetadata metadata;
14611 metadata = &resultMetadata;
14612 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14613 camera_metadata_t* updatedResultMetadata = metadata.release();
14614
14615 QCamera3PicChannel *picChannel =
14616 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14617
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014618 // Check if dumping HDR+ YUV output is enabled.
14619 char prop[PROPERTY_VALUE_MAX];
14620 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14621 bool dumpYuvOutput = atoi(prop);
14622
14623 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014624 // Dump yuv buffer to a ppm file.
14625 pbcamera::StreamConfiguration outputConfig;
14626 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14627 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14628 if (rc == OK) {
14629 char buf[FILENAME_MAX] = {};
14630 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14631 result->requestId, result->outputBuffers[0].streamId,
14632 outputConfig.image.width, outputConfig.image.height);
14633
14634 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14635 } else {
14636 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14637 __FUNCTION__, strerror(-rc), rc);
14638 }
14639 }
14640
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014641 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14642 auto halMetadata = std::make_shared<metadata_buffer_t>();
14643 clear_metadata_buffer(halMetadata.get());
14644
14645 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14646 // encoding.
14647 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14648 halStreamId, /*minFrameDuration*/0);
14649 if (res == OK) {
14650 // Return the buffer to pic channel for encoding.
14651 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14652 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14653 halMetadata);
14654 } else {
14655 // Return the buffer without encoding.
14656 // TODO: This should not happen but we may want to report an error buffer to camera
14657 // service.
14658 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14659 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14660 strerror(-res), res);
14661 }
14662
14663 // Send HDR+ metadata to framework.
14664 {
14665 pthread_mutex_lock(&mMutex);
14666
14667 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14668 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14669 pthread_mutex_unlock(&mMutex);
14670 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014671
14672 // Remove the HDR+ pending request.
14673 {
14674 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14675 auto req = mHdrPlusPendingRequests.find(result->requestId);
14676 mHdrPlusPendingRequests.erase(req);
14677 }
14678 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014679}
14680
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014681void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14682 // TODO: Handle HDR+ capture failures and send the failure to framework.
14683 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14684 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14685
14686 // Return the buffer to pic channel.
14687 QCamera3PicChannel *picChannel =
14688 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14689 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14690
14691 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014692}
14693
Thierry Strudel3d639192016-09-09 11:52:26 -070014694}; //end namespace qcamera