/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
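// Illustrative example only (the exact batching policy is implemented later in this
// file): a 240 fps HFR request paired with a PREVIEW_FPS_FOR_HFR (30 fps) preview would
// typically be batched as 240 / 30 = 8 frames per container buffer, consistent with
// MAX_HFR_BATCH_SIZE above.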
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Threshold, in seconds, for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
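// Example (illustrative): METADATA_MAP_SIZE(EFFECT_MODES_MAP) evaluates at compile time
// to the number of entries in the EFFECT_MODES_MAP table defined below, which is how the
// enum lookup helpers in this file are sized.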

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
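// Layout implied by the indices above: each detected face's landmark array is packed as
// [leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY], i.e.
// TOTAL_LANDMARK_INDICES values per face.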

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.
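// Typical access pattern for the Easel globals above (as used in openCamera() and
// closeCamera() below):
//     Mutex::Autolock l(gHdrPlusClientLock);
//     // ... read or modify gEaselManagerClient / gHdrPlusClient ...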


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * lookup traverses from lower to higher index, so for HAL values that map to several
 * different Android values, the first match wins (see the example after this table).
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
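// For example, CAM_AWB_D50 appears above for D50, DAYLIGHT and FINE_WEATHER; a
// HAL-to-Android lookup therefore returns ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,
// the first (lowest-index) match.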

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// Initialise each entry to an invalid session id; the real id is filled in by
// openCamera() and reset back to this sentinel in closeCamera().
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

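// Logs a named profiling event with a CLOCK_BOOTTIME timestamp in milliseconds; it is a
// no-op unless gEaselProfilingEnabled is set. Example call site (see openCamera() below):
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");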
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free(*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if (!mFirstConfiguration) {
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch (evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            Mutex::Autolock l(gHdrPlusClientLock);
            if (gEaselManagerClient.isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient.suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *)malloc(sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if (rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if (rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap, 0);
    }

    LOGH("mCameraId=%d", mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes differ for each stream format, so check against the
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from frameworks is always the full active array size,
                 * but it is not clear from the spec if the framework will always
                 * follow that; also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001313/*===========================================================================
1314 * FUNCTION : validateUsageFlags
1315 *
1316 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1317 *
1318 * PARAMETERS :
1319 * @stream_list : streams to be configured
1320 *
1321 * RETURN :
1322 * NO_ERROR if the usage flags are supported
1323 * error code if usage flags are not supported
1324 *
1325 *==========================================================================*/
1326int QCamera3HardwareInterface::validateUsageFlags(
1327 const camera3_stream_configuration_t* streamList)
1328{
1329 for (size_t j = 0; j < streamList->num_streams; j++) {
1330 const camera3_stream_t *newStream = streamList->streams[j];
1331
1332 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1333 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1334 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1335 continue;
1336 }
1337
1338 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1339 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1340 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1341 bool forcePreviewUBWC = true;
1342 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1343 forcePreviewUBWC = false;
1344 }
1345 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1346 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1347 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1348 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1349 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1350 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1351
1352 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1353 // So color spaces will always match.
1354
1355 // Check whether underlying formats of shared streams match.
1356 if (isVideo && isPreview && videoFormat != previewFormat) {
1357 LOGE("Combined video and preview usage flag is not supported");
1358 return -EINVAL;
1359 }
1360 if (isPreview && isZSL && previewFormat != zslFormat) {
1361 LOGE("Combined preview and zsl usage flag is not supported");
1362 return -EINVAL;
1363 }
1364 if (isVideo && isZSL && videoFormat != zslFormat) {
1365 LOGE("Combined video and zsl usage flag is not supported");
1366 return -EINVAL;
1367 }
1368 }
1369 return NO_ERROR;
1370}
1371
1372/*===========================================================================
1373 * FUNCTION : validateUsageFlagsForEis
1374 *
1375 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1376 *
1377 * PARAMETERS :
1378 * @stream_list : streams to be configured
1379 *
1380 * RETURN :
1381 * NO_ERROR if the usage flags are supported
1382 * error code if usage flags are not supported
1383 *
1384 *==========================================================================*/
1385int QCamera3HardwareInterface::validateUsageFlagsForEis(
1386 const camera3_stream_configuration_t* streamList)
1387{
1388 for (size_t j = 0; j < streamList->num_streams; j++) {
1389 const camera3_stream_t *newStream = streamList->streams[j];
1390
1391 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1392 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1393
1394        // Because EIS is "hard-coded" for certain use cases, and the current
1395 // implementation doesn't support shared preview and video on the same
1396 // stream, return failure if EIS is forced on.
1397 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1398 LOGE("Combined video and preview usage flag is not supported due to EIS");
1399 return -EINVAL;
1400 }
1401 }
1402 return NO_ERROR;
1403}
1404
Thierry Strudel3d639192016-09-09 11:52:26 -07001405/*==============================================================================
1406 * FUNCTION : isSupportChannelNeeded
1407 *
1408 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1409 *
1410 * PARAMETERS :
1411 * @stream_list : streams to be configured
1412 * @stream_config_info : the config info for streams to be configured
1413 *
1414 * RETURN : Boolean true/false decision
1415 *
1416 *==========================================================================*/
1417bool QCamera3HardwareInterface::isSupportChannelNeeded(
1418 camera3_stream_configuration_t *streamList,
1419 cam_stream_size_info_t stream_config_info)
1420{
1421 uint32_t i;
1422 bool pprocRequested = false;
1423 /* Check for conditions where PProc pipeline does not have any streams*/
1424 for (i = 0; i < stream_config_info.num_streams; i++) {
1425 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1426 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1427 pprocRequested = true;
1428 break;
1429 }
1430 }
1431
1432 if (pprocRequested == false )
1433 return true;
1434
1435    /* Dummy stream is needed if only raw or jpeg streams are present */
1436 for (i = 0; i < streamList->num_streams; i++) {
1437 switch(streamList->streams[i]->format) {
1438 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1439 case HAL_PIXEL_FORMAT_RAW10:
1440 case HAL_PIXEL_FORMAT_RAW16:
1441 case HAL_PIXEL_FORMAT_BLOB:
1442 break;
1443 default:
1444 return false;
1445 }
1446 }
1447 return true;
1448}
1449
1450/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001451 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001452 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001453 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001454 *
1455 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001456 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001457 *
1458 * RETURN : int32_t type of status
1459 * NO_ERROR -- success
1460 * non-zero failure code
1461 *
1462 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001463int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001464{
1465 int32_t rc = NO_ERROR;
1466
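    // Find the maximum width and height across all configured streams and pass
    // it to the backend as CAM_INTF_PARM_MAX_DIMENSION before querying the
    // sensor mode information.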
1467 cam_dimension_t max_dim = {0, 0};
1468 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1469 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1470 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1471 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1472 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
1476
1477 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1478 max_dim);
1479 if (rc != NO_ERROR) {
1480 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1481 return rc;
1482 }
1483
1484 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1485 if (rc != NO_ERROR) {
1486 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1487 return rc;
1488 }
1489
1490 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001492
1493 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1494 mParameters);
1495 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001496 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 return rc;
1498 }
1499
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001500 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001501 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1502 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1503 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1504 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1505 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001506
1507 return rc;
1508}
1509
1510/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001511 * FUNCTION : addToPPFeatureMask
1512 *
1513 * DESCRIPTION: add additional features to pp feature mask based on
1514 * stream type and use case
1515 *
1516 * PARAMETERS :
1517 * @stream_format : stream type for feature mask
1518 * @stream_idx : stream idx within postprocess_mask list to change
1519 *
1520 * RETURN : None
1521 *
1522 *==========================================================================*/
1523void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1524 uint32_t stream_idx)
1525{
1526 char feature_mask_value[PROPERTY_VALUE_MAX];
1527 cam_feature_mask_t feature_mask;
1528 int args_converted;
1529 int property_len;
1530
1531 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001532#ifdef _LE_CAMERA_
1533 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1534 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1535 property_len = property_get("persist.camera.hal3.feature",
1536 feature_mask_value, swtnr_feature_mask_value);
1537#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001538 property_len = property_get("persist.camera.hal3.feature",
1539 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001540#endif
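    // The property accepts either a hex value prefixed with "0x" or a plain
    // decimal value.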
Thierry Strudel3d639192016-09-09 11:52:26 -07001541 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1542 (feature_mask_value[1] == 'x')) {
1543 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1544 } else {
1545 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1546 }
1547 if (1 != args_converted) {
1548 feature_mask = 0;
1549 LOGE("Wrong feature mask %s", feature_mask_value);
1550 return;
1551 }
1552
1553 switch (stream_format) {
1554 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1555 /* Add LLVD to pp feature mask only if video hint is enabled */
1556 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1557 mStreamConfigInfo.postprocess_mask[stream_idx]
1558 |= CAM_QTI_FEATURE_SW_TNR;
1559 LOGH("Added SW TNR to pp feature mask");
1560 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1561 mStreamConfigInfo.postprocess_mask[stream_idx]
1562 |= CAM_QCOM_FEATURE_LLVD;
1563 LOGH("Added LLVD SeeMore to pp feature mask");
1564 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001565 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1566 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1567 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1568 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001569 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1570 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1571 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1572 CAM_QTI_FEATURE_BINNING_CORRECTION;
1573 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001574 break;
1575 }
1576 default:
1577 break;
1578 }
1579 LOGD("PP feature mask %llx",
1580 mStreamConfigInfo.postprocess_mask[stream_idx]);
1581}
1582
1583/*==============================================================================
1584 * FUNCTION : updateFpsInPreviewBuffer
1585 *
1586 * DESCRIPTION: update FPS information in preview buffer.
1587 *
1588 * PARAMETERS :
1589 * @metadata : pointer to metadata buffer
1590 * @frame_number: frame_number to look for in pending buffer list
1591 *
1592 * RETURN : None
1593 *
1594 *==========================================================================*/
1595void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1596 uint32_t frame_number)
1597{
1598 // Mark all pending buffers for this particular request
1599 // with corresponding framerate information
1600 for (List<PendingBuffersInRequest>::iterator req =
1601 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1602 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1603 for(List<PendingBufferInfo>::iterator j =
1604 req->mPendingBufferList.begin();
1605 j != req->mPendingBufferList.end(); j++) {
1606 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1607 if ((req->frame_number == frame_number) &&
1608 (channel->getStreamTypeMask() &
1609 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1610 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1611 CAM_INTF_PARM_FPS_RANGE, metadata) {
1612 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1613 struct private_handle_t *priv_handle =
1614 (struct private_handle_t *)(*(j->buffer));
1615 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1616 }
1617 }
1618 }
1619 }
1620}
1621
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001622/*==============================================================================
1623 * FUNCTION : updateTimeStampInPendingBuffers
1624 *
1625 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1626 * of a frame number
1627 *
1628 * PARAMETERS :
1629 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1630 * @timestamp : timestamp to be set
1631 *
1632 * RETURN : None
1633 *
1634 *==========================================================================*/
1635void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1636 uint32_t frameNumber, nsecs_t timestamp)
1637{
1638 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1639 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1640 if (req->frame_number != frameNumber)
1641 continue;
1642
1643 for (auto k = req->mPendingBufferList.begin();
1644 k != req->mPendingBufferList.end(); k++ ) {
1645 struct private_handle_t *priv_handle =
1646 (struct private_handle_t *) (*(k->buffer));
1647 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1648 }
1649 }
1650 return;
1651}
1652
Thierry Strudel3d639192016-09-09 11:52:26 -07001653/*===========================================================================
1654 * FUNCTION : configureStreams
1655 *
1656 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1657 * and output streams.
1658 *
1659 * PARAMETERS :
1660 * @stream_list : streams to be configured
1661 *
1662 * RETURN :
1663 *
1664 *==========================================================================*/
1665int QCamera3HardwareInterface::configureStreams(
1666 camera3_stream_configuration_t *streamList)
1667{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001668 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001669 int rc = 0;
1670
1671 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001672 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001673 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001674 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001675
1676 return rc;
1677}
1678
1679/*===========================================================================
1680 * FUNCTION : configureStreamsPerfLocked
1681 *
1682 * DESCRIPTION: configureStreams while perfLock is held.
1683 *
1684 * PARAMETERS :
1685 * @stream_list : streams to be configured
1686 *
1687 * RETURN : int32_t type of status
1688 * NO_ERROR -- success
1689 * non-zero failure code
1690 *==========================================================================*/
1691int QCamera3HardwareInterface::configureStreamsPerfLocked(
1692 camera3_stream_configuration_t *streamList)
1693{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001694 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001695 int rc = 0;
1696
1697 // Sanity check stream_list
1698 if (streamList == NULL) {
1699 LOGE("NULL stream configuration");
1700 return BAD_VALUE;
1701 }
1702 if (streamList->streams == NULL) {
1703 LOGE("NULL stream list");
1704 return BAD_VALUE;
1705 }
1706
1707 if (streamList->num_streams < 1) {
1708 LOGE("Bad number of streams requested: %d",
1709 streamList->num_streams);
1710 return BAD_VALUE;
1711 }
1712
1713 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1714 LOGE("Maximum number of streams %d exceeded: %d",
1715 MAX_NUM_STREAMS, streamList->num_streams);
1716 return BAD_VALUE;
1717 }
1718
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001719 rc = validateUsageFlags(streamList);
1720 if (rc != NO_ERROR) {
1721 return rc;
1722 }
1723
Thierry Strudel3d639192016-09-09 11:52:26 -07001724 mOpMode = streamList->operation_mode;
1725 LOGD("mOpMode: %d", mOpMode);
1726
1727    /* First invalidate all the streams in mStreamInfo;
1728     * if they appear again, they will be validated */
1729 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1730 it != mStreamInfo.end(); it++) {
1731 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1732 if (channel) {
1733 channel->stop();
1734 }
1735 (*it)->status = INVALID;
1736 }
1737
1738 if (mRawDumpChannel) {
1739 mRawDumpChannel->stop();
1740 delete mRawDumpChannel;
1741 mRawDumpChannel = NULL;
1742 }
1743
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001744 if (mHdrPlusRawSrcChannel) {
1745 mHdrPlusRawSrcChannel->stop();
1746 delete mHdrPlusRawSrcChannel;
1747 mHdrPlusRawSrcChannel = NULL;
1748 }
1749
Thierry Strudel3d639192016-09-09 11:52:26 -07001750 if (mSupportChannel)
1751 mSupportChannel->stop();
1752
1753 if (mAnalysisChannel) {
1754 mAnalysisChannel->stop();
1755 }
1756 if (mMetadataChannel) {
1757        /* If mStreamInfo is not empty, there is a metadata stream */
1758 mMetadataChannel->stop();
1759 }
1760 if (mChannelHandle) {
1761 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1762 mChannelHandle);
1763 LOGD("stopping channel %d", mChannelHandle);
1764 }
1765
1766 pthread_mutex_lock(&mMutex);
1767
1768 // Check state
1769 switch (mState) {
1770 case INITIALIZED:
1771 case CONFIGURED:
1772 case STARTED:
1773 /* valid state */
1774 break;
1775 default:
1776 LOGE("Invalid state %d", mState);
1777 pthread_mutex_unlock(&mMutex);
1778 return -ENODEV;
1779 }
1780
1781 /* Check whether we have video stream */
1782 m_bIs4KVideo = false;
1783 m_bIsVideo = false;
1784 m_bEisSupportedSize = false;
1785 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001786 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001787 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001788 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001789 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001790 uint32_t videoWidth = 0U;
1791 uint32_t videoHeight = 0U;
1792 size_t rawStreamCnt = 0;
1793 size_t stallStreamCnt = 0;
1794 size_t processedStreamCnt = 0;
1795 // Number of streams on ISP encoder path
1796 size_t numStreamsOnEncoder = 0;
1797 size_t numYuv888OnEncoder = 0;
1798 bool bYuv888OverrideJpeg = false;
1799 cam_dimension_t largeYuv888Size = {0, 0};
1800 cam_dimension_t maxViewfinderSize = {0, 0};
1801 bool bJpegExceeds4K = false;
1802 bool bJpegOnEncoder = false;
1803 bool bUseCommonFeatureMask = false;
1804 cam_feature_mask_t commonFeatureMask = 0;
1805 bool bSmallJpegSize = false;
1806 uint32_t width_ratio;
1807 uint32_t height_ratio;
1808 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1809 camera3_stream_t *inputStream = NULL;
1810 bool isJpeg = false;
1811 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001812 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001813 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001814
1815 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1816
1817 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 uint8_t eis_prop_set;
1819 uint32_t maxEisWidth = 0;
1820 uint32_t maxEisHeight = 0;
1821
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001822 // Initialize all instant AEC related variables
1823 mInstantAEC = false;
1824 mResetInstantAEC = false;
1825 mInstantAECSettledFrameNumber = 0;
1826 mAecSkipDisplayFrameBound = 0;
1827 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001828 mCurrFeatureState = 0;
1829 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001830
Thierry Strudel3d639192016-09-09 11:52:26 -07001831 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1832
1833 size_t count = IS_TYPE_MAX;
1834 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1835 for (size_t i = 0; i < count; i++) {
1836 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001837 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1838 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001839 break;
1840 }
1841 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001842
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001843 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001844 maxEisWidth = MAX_EIS_WIDTH;
1845 maxEisHeight = MAX_EIS_HEIGHT;
1846 }
1847
1848 /* EIS setprop control */
1849 char eis_prop[PROPERTY_VALUE_MAX];
1850 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001851 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001852 eis_prop_set = (uint8_t)atoi(eis_prop);
1853
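    // EIS is enabled only when the property requests it, the sensor reports an
    // EIS-capable IS type, and we are not in constrained high-speed (HFR) mode.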
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001854 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001855 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1856
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001857 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1858 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 /* stream configurations */
1861 for (size_t i = 0; i < streamList->num_streams; i++) {
1862 camera3_stream_t *newStream = streamList->streams[i];
1863 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1864 "height = %d, rotation = %d, usage = 0x%x",
1865 i, newStream->stream_type, newStream->format,
1866 newStream->width, newStream->height, newStream->rotation,
1867 newStream->usage);
1868 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1869 newStream->stream_type == CAMERA3_STREAM_INPUT){
1870 isZsl = true;
1871 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001872 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1873 IS_USAGE_PREVIEW(newStream->usage)) {
1874 isPreview = true;
1875 }
1876
Thierry Strudel3d639192016-09-09 11:52:26 -07001877 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1878 inputStream = newStream;
1879 }
1880
Emilian Peev7650c122017-01-19 08:24:33 -08001881 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1882 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001883 isJpeg = true;
1884 jpegSize.width = newStream->width;
1885 jpegSize.height = newStream->height;
1886 if (newStream->width > VIDEO_4K_WIDTH ||
1887 newStream->height > VIDEO_4K_HEIGHT)
1888 bJpegExceeds4K = true;
1889 }
1890
1891 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1892 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1893 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001894 // In HAL3 we can have multiple different video streams.
1895 // The variables video width and height are used below as
1896 // dimensions of the biggest of them
1897 if (videoWidth < newStream->width ||
1898 videoHeight < newStream->height) {
1899 videoWidth = newStream->width;
1900 videoHeight = newStream->height;
1901 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001902 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1903 (VIDEO_4K_HEIGHT <= newStream->height)) {
1904 m_bIs4KVideo = true;
1905 }
1906 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1907 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001908
Thierry Strudel3d639192016-09-09 11:52:26 -07001909 }
1910 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1911 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1912 switch (newStream->format) {
1913 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001914 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1915 depthPresent = true;
1916 break;
1917 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 stallStreamCnt++;
1919 if (isOnEncoder(maxViewfinderSize, newStream->width,
1920 newStream->height)) {
1921 numStreamsOnEncoder++;
1922 bJpegOnEncoder = true;
1923 }
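                // Flag JPEG sizes that exceed the ISP's maximum downscale factor;
                // such snapshots are given the CAM_QCOM_FEATURE_PP_SUPERSET_HAL3
                // post-process mask further below.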
1924 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1925 newStream->width);
1926 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1927                        newStream->height);
1928 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1929 "FATAL: max_downscale_factor cannot be zero and so assert");
1930 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1931 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1932 LOGH("Setting small jpeg size flag to true");
1933 bSmallJpegSize = true;
1934 }
1935 break;
1936 case HAL_PIXEL_FORMAT_RAW10:
1937 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1938 case HAL_PIXEL_FORMAT_RAW16:
1939 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001940 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1941 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1942 pdStatCount++;
1943 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001944 break;
1945 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1946 processedStreamCnt++;
1947 if (isOnEncoder(maxViewfinderSize, newStream->width,
1948 newStream->height)) {
1949 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1950 !IS_USAGE_ZSL(newStream->usage)) {
1951 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1952 }
1953 numStreamsOnEncoder++;
1954 }
1955 break;
1956 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1957 processedStreamCnt++;
1958 if (isOnEncoder(maxViewfinderSize, newStream->width,
1959 newStream->height)) {
1960 // If Yuv888 size is not greater than 4K, set feature mask
1961 // to SUPERSET so that it support concurrent request on
1962 // YUV and JPEG.
1963 if (newStream->width <= VIDEO_4K_WIDTH &&
1964 newStream->height <= VIDEO_4K_HEIGHT) {
1965 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1966 }
1967 numStreamsOnEncoder++;
1968 numYuv888OnEncoder++;
1969 largeYuv888Size.width = newStream->width;
1970 largeYuv888Size.height = newStream->height;
1971 }
1972 break;
1973 default:
1974 processedStreamCnt++;
1975 if (isOnEncoder(maxViewfinderSize, newStream->width,
1976 newStream->height)) {
1977 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1978 numStreamsOnEncoder++;
1979 }
1980 break;
1981 }
1982
1983 }
1984 }
1985
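    // EIS applies only to rear cameras and only when a video stream is configured.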
1986 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1987 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1988 !m_bIsVideo) {
1989 m_bEisEnable = false;
1990 }
1991
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001992 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1993 pthread_mutex_unlock(&mMutex);
1994 return -EINVAL;
1995 }
1996
Thierry Strudel54dc9782017-02-15 12:12:10 -08001997 uint8_t forceEnableTnr = 0;
1998 char tnr_prop[PROPERTY_VALUE_MAX];
1999 memset(tnr_prop, 0, sizeof(tnr_prop));
2000 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2001 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 /* Logic to enable/disable TNR based on specific config size/etc.*/
2004 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002005 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2006 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002007 else if (forceEnableTnr)
2008 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002009
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002010 char videoHdrProp[PROPERTY_VALUE_MAX];
2011 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2012 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2013 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2014
2015 if (hdr_mode_prop == 1 && m_bIsVideo &&
2016 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2017 m_bVideoHdrEnabled = true;
2018 else
2019 m_bVideoHdrEnabled = false;
2020
2021
Thierry Strudel3d639192016-09-09 11:52:26 -07002022 /* Check if num_streams is sane */
2023 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2024 rawStreamCnt > MAX_RAW_STREAMS ||
2025 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2026        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2027 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2028 pthread_mutex_unlock(&mMutex);
2029 return -EINVAL;
2030 }
2031 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002032 if (isZsl && m_bIs4KVideo) {
2033 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002034 pthread_mutex_unlock(&mMutex);
2035 return -EINVAL;
2036 }
2037 /* Check if stream sizes are sane */
2038 if (numStreamsOnEncoder > 2) {
2039 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2040 pthread_mutex_unlock(&mMutex);
2041 return -EINVAL;
2042 } else if (1 < numStreamsOnEncoder){
2043 bUseCommonFeatureMask = true;
2044 LOGH("Multiple streams above max viewfinder size, common mask needed");
2045 }
2046
2047 /* Check if BLOB size is greater than 4k in 4k recording case */
2048 if (m_bIs4KVideo && bJpegExceeds4K) {
2049 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2050 pthread_mutex_unlock(&mMutex);
2051 return -EINVAL;
2052 }
2053
Emilian Peev7650c122017-01-19 08:24:33 -08002054 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2055 depthPresent) {
2056 LOGE("HAL doesn't support depth streams in HFR mode!");
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 }
2060
Thierry Strudel3d639192016-09-09 11:52:26 -07002061 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2062 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2063 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2064 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2065 // configurations:
2066 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2067 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2068 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2069 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2070 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2071 __func__);
2072 pthread_mutex_unlock(&mMutex);
2073 return -EINVAL;
2074 }
2075
2076 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2077 // the YUV stream's size is greater or equal to the JPEG size, set common
2078 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2079 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2080 jpegSize.width, jpegSize.height) &&
2081 largeYuv888Size.width > jpegSize.width &&
2082 largeYuv888Size.height > jpegSize.height) {
2083 bYuv888OverrideJpeg = true;
2084 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2085 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2086 }
2087
2088 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2089 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2090 commonFeatureMask);
2091 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2092 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2093
2094 rc = validateStreamDimensions(streamList);
2095 if (rc == NO_ERROR) {
2096 rc = validateStreamRotations(streamList);
2097 }
2098 if (rc != NO_ERROR) {
2099 LOGE("Invalid stream configuration requested!");
2100 pthread_mutex_unlock(&mMutex);
2101 return rc;
2102 }
2103
Emilian Peev0f3c3162017-03-15 12:57:46 +00002104 if (1 < pdStatCount) {
2105 LOGE("HAL doesn't support multiple PD streams");
2106 pthread_mutex_unlock(&mMutex);
2107 return -EINVAL;
2108 }
2109
2110 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2111 (1 == pdStatCount)) {
2112 LOGE("HAL doesn't support PD streams in HFR mode!");
2113 pthread_mutex_unlock(&mMutex);
2114 return -EINVAL;
2115 }
2116
Thierry Strudel3d639192016-09-09 11:52:26 -07002117 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2118 for (size_t i = 0; i < streamList->num_streams; i++) {
2119 camera3_stream_t *newStream = streamList->streams[i];
2120 LOGH("newStream type = %d, stream format = %d "
2121 "stream size : %d x %d, stream rotation = %d",
2122 newStream->stream_type, newStream->format,
2123 newStream->width, newStream->height, newStream->rotation);
2124 //if the stream is in the mStreamList validate it
2125 bool stream_exists = false;
2126 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2127 it != mStreamInfo.end(); it++) {
2128 if ((*it)->stream == newStream) {
2129 QCamera3ProcessingChannel *channel =
2130 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2131 stream_exists = true;
2132 if (channel)
2133 delete channel;
2134 (*it)->status = VALID;
2135 (*it)->stream->priv = NULL;
2136 (*it)->channel = NULL;
2137 }
2138 }
2139 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2140 //new stream
2141 stream_info_t* stream_info;
2142 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2143 if (!stream_info) {
2144 LOGE("Could not allocate stream info");
2145 rc = -ENOMEM;
2146 pthread_mutex_unlock(&mMutex);
2147 return rc;
2148 }
2149 stream_info->stream = newStream;
2150 stream_info->status = VALID;
2151 stream_info->channel = NULL;
2152 mStreamInfo.push_back(stream_info);
2153 }
2154 /* Covers Opaque ZSL and API1 F/W ZSL */
2155 if (IS_USAGE_ZSL(newStream->usage)
2156 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2157 if (zslStream != NULL) {
2158 LOGE("Multiple input/reprocess streams requested!");
2159 pthread_mutex_unlock(&mMutex);
2160 return BAD_VALUE;
2161 }
2162 zslStream = newStream;
2163 }
2164 /* Covers YUV reprocess */
2165 if (inputStream != NULL) {
2166 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2167 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2168 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2169 && inputStream->width == newStream->width
2170 && inputStream->height == newStream->height) {
2171 if (zslStream != NULL) {
2172                    /* This scenario indicates that multiple YUV streams with the
2173                     * same size as the input stream have been requested. Since the
2174                     * zsl stream handle is solely used to override the size of
2175                     * streams that share h/w streams, we just make a guess here as
2176                     * to which stream is the ZSL stream. This will be refactored
2177                     * once we have generic logic for streams sharing encoder output.
2178 */
2179 LOGH("Warning, Multiple ip/reprocess streams requested!");
2180 }
2181 zslStream = newStream;
2182 }
2183 }
2184 }
2185
2186 /* If a zsl stream is set, we know that we have configured at least one input or
2187 bidirectional stream */
2188 if (NULL != zslStream) {
2189 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2190 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2191 mInputStreamInfo.format = zslStream->format;
2192 mInputStreamInfo.usage = zslStream->usage;
2193 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2194 mInputStreamInfo.dim.width,
2195 mInputStreamInfo.dim.height,
2196 mInputStreamInfo.format, mInputStreamInfo.usage);
2197 }
2198
2199 cleanAndSortStreamInfo();
2200 if (mMetadataChannel) {
2201 delete mMetadataChannel;
2202 mMetadataChannel = NULL;
2203 }
2204 if (mSupportChannel) {
2205 delete mSupportChannel;
2206 mSupportChannel = NULL;
2207 }
2208
2209 if (mAnalysisChannel) {
2210 delete mAnalysisChannel;
2211 mAnalysisChannel = NULL;
2212 }
2213
2214 if (mDummyBatchChannel) {
2215 delete mDummyBatchChannel;
2216 mDummyBatchChannel = NULL;
2217 }
2218
Emilian Peev7650c122017-01-19 08:24:33 -08002219 if (mDepthChannel) {
2220 mDepthChannel = NULL;
2221 }
2222
Thierry Strudel2896d122017-02-23 19:18:03 -08002223 char is_type_value[PROPERTY_VALUE_MAX];
2224 property_get("persist.camera.is_type", is_type_value, "4");
2225 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2226
Binhao Line406f062017-05-03 14:39:44 -07002227 char property_value[PROPERTY_VALUE_MAX];
2228 property_get("persist.camera.gzoom.at", property_value, "0");
2229 int goog_zoom_at = atoi(property_value);
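    // The property is a bitmask: bit 0 enables Google zoom on the video stream,
    // bit 1 enables it on the preview stream.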
2230 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2231 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2232
2233 property_get("persist.camera.gzoom.4k", property_value, "0");
2234 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2235
Thierry Strudel3d639192016-09-09 11:52:26 -07002236 //Create metadata channel and initialize it
2237 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2238 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2239 gCamCapability[mCameraId]->color_arrangement);
2240 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2241 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002242 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002243 if (mMetadataChannel == NULL) {
2244 LOGE("failed to allocate metadata channel");
2245 rc = -ENOMEM;
2246 pthread_mutex_unlock(&mMutex);
2247 return rc;
2248 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002249 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002250 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2251 if (rc < 0) {
2252 LOGE("metadata channel initialization failed");
2253 delete mMetadataChannel;
2254 mMetadataChannel = NULL;
2255 pthread_mutex_unlock(&mMutex);
2256 return rc;
2257 }
2258
Thierry Strudel2896d122017-02-23 19:18:03 -08002259 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002261 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002262    // Keep track of preview/video stream indices.
2263    // There could be more than one preview stream, but only one video stream.
2264 int32_t video_stream_idx = -1;
2265 int32_t preview_stream_idx[streamList->num_streams];
2266 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2268 /* Allocate channel objects for the requested streams */
2269 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002270
Thierry Strudel3d639192016-09-09 11:52:26 -07002271 camera3_stream_t *newStream = streamList->streams[i];
2272 uint32_t stream_usage = newStream->usage;
2273 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2274 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2275 struct camera_info *p_info = NULL;
2276 pthread_mutex_lock(&gCamLock);
2277 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2278 pthread_mutex_unlock(&gCamLock);
2279 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2280 || IS_USAGE_ZSL(newStream->usage)) &&
2281 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002284 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2285 if (bUseCommonFeatureMask)
2286 zsl_ppmask = commonFeatureMask;
2287 else
2288 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002289 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002290 if (numStreamsOnEncoder > 0)
2291 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2292 else
2293 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002294 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002295 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002297 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002298 LOGH("Input stream configured, reprocess config");
2299 } else {
2300 //for non zsl streams find out the format
2301 switch (newStream->format) {
2302 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2303 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002304 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002305 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2306 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2307 /* add additional features to pp feature mask */
2308 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2309 mStreamConfigInfo.num_streams);
2310
2311 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2312 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2313 CAM_STREAM_TYPE_VIDEO;
2314 if (m_bTnrEnabled && m_bTnrVideo) {
2315 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2316 CAM_QCOM_FEATURE_CPP_TNR;
2317 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2318 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2319 ~CAM_QCOM_FEATURE_CDS;
2320 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002321 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2322 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2323 CAM_QTI_FEATURE_PPEISCORE;
2324 }
Binhao Line406f062017-05-03 14:39:44 -07002325 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2326 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2327 CAM_QCOM_FEATURE_GOOG_ZOOM;
2328 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002329 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002330 } else {
2331 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2332 CAM_STREAM_TYPE_PREVIEW;
2333 if (m_bTnrEnabled && m_bTnrPreview) {
2334 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2335 CAM_QCOM_FEATURE_CPP_TNR;
2336 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2337 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2338 ~CAM_QCOM_FEATURE_CDS;
2339 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002340 if(!m_bSwTnrPreview) {
2341 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2342 ~CAM_QTI_FEATURE_SW_TNR;
2343 }
Binhao Line406f062017-05-03 14:39:44 -07002344 if (is_goog_zoom_preview_enabled) {
2345 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2346 CAM_QCOM_FEATURE_GOOG_ZOOM;
2347 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002348 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002349 padding_info.width_padding = mSurfaceStridePadding;
2350 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002351 previewSize.width = (int32_t)newStream->width;
2352 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002353 }
2354 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2355 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2356 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2357 newStream->height;
2358 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2359 newStream->width;
2360 }
2361 }
2362 break;
2363 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002364 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002365 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2366 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2367 if (bUseCommonFeatureMask)
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2369 commonFeatureMask;
2370 else
2371 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2372 CAM_QCOM_FEATURE_NONE;
2373 } else {
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2375 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2376 }
2377 break;
2378 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002379 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002380 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2381 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2382 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2384 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002385 /* Remove rotation if it is not supported
2386 for 4K LiveVideo snapshot case (online processing) */
2387 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2388 CAM_QCOM_FEATURE_ROTATION)) {
2389 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2390 &= ~CAM_QCOM_FEATURE_ROTATION;
2391 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002392 } else {
2393 if (bUseCommonFeatureMask &&
2394 isOnEncoder(maxViewfinderSize, newStream->width,
2395 newStream->height)) {
2396 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2397 } else {
2398 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2399 }
2400 }
2401 if (isZsl) {
2402 if (zslStream) {
2403 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2404 (int32_t)zslStream->width;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2406 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002407 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2408 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002409 } else {
2410 LOGE("Error, No ZSL stream identified");
2411 pthread_mutex_unlock(&mMutex);
2412 return -EINVAL;
2413 }
2414 } else if (m_bIs4KVideo) {
2415 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2416 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2417 } else if (bYuv888OverrideJpeg) {
2418 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2419 (int32_t)largeYuv888Size.width;
2420 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2421 (int32_t)largeYuv888Size.height;
2422 }
2423 break;
2424 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2425 case HAL_PIXEL_FORMAT_RAW16:
2426 case HAL_PIXEL_FORMAT_RAW10:
2427 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2428 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2429 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002430 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2431 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2432 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2433 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2434 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2435 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2436 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2437 gCamCapability[mCameraId]->dt[mPDIndex];
2438 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2439 gCamCapability[mCameraId]->vc[mPDIndex];
2440 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002441 break;
2442 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002443 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2445 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2446 break;
2447 }
2448 }
2449
2450 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2451 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2452 gCamCapability[mCameraId]->color_arrangement);
2453
2454 if (newStream->priv == NULL) {
2455 //New stream, construct channel
2456 switch (newStream->stream_type) {
2457 case CAMERA3_STREAM_INPUT:
2458 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2459 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2460 break;
2461 case CAMERA3_STREAM_BIDIRECTIONAL:
2462 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2463 GRALLOC_USAGE_HW_CAMERA_WRITE;
2464 break;
2465 case CAMERA3_STREAM_OUTPUT:
2466                /* For video encoding streams, set the read/write rarely
2467                 * flags so that the buffers may be set to uncached */
2468 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2469 newStream->usage |=
2470 (GRALLOC_USAGE_SW_READ_RARELY |
2471 GRALLOC_USAGE_SW_WRITE_RARELY |
2472 GRALLOC_USAGE_HW_CAMERA_WRITE);
2473 else if (IS_USAGE_ZSL(newStream->usage))
2474 {
2475 LOGD("ZSL usage flag skipping");
2476 }
2477 else if (newStream == zslStream
2478 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2479 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2480 } else
2481 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2482 break;
2483 default:
2484 LOGE("Invalid stream_type %d", newStream->stream_type);
2485 break;
2486 }
2487
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002488 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002489 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2490 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2491 QCamera3ProcessingChannel *channel = NULL;
2492 switch (newStream->format) {
2493 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2494 if ((newStream->usage &
2495 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2496 (streamList->operation_mode ==
2497 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2498 ) {
2499 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2500 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002501 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 this,
2503 newStream,
2504 (cam_stream_type_t)
2505 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2506 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2507 mMetadataChannel,
2508 0); //heap buffers are not required for HFR video channel
2509 if (channel == NULL) {
2510 LOGE("allocation of channel failed");
2511 pthread_mutex_unlock(&mMutex);
2512 return -ENOMEM;
2513 }
2514 //channel->getNumBuffers() will return 0 here so use
2515                        //MAX_INFLIGHT_HFR_REQUESTS
2516 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2517 newStream->priv = channel;
2518 LOGI("num video buffers in HFR mode: %d",
2519 MAX_INFLIGHT_HFR_REQUESTS);
2520 } else {
2521                    /* In the HFR preview-only case, copy the stream contents to
2522                     * create a dummy batch channel so that sensor streaming is in
2523                     * HFR mode */
2524 if (!m_bIsVideo && (streamList->operation_mode ==
2525 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2526 mDummyBatchStream = *newStream;
2527 }
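                    // Use a larger buffer count for video streams when EIS 3.0 is
                    // enabled (presumably to cover the extra frame latency EIS adds).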
Thierry Strudel2896d122017-02-23 19:18:03 -08002528 int bufferCount = MAX_INFLIGHT_REQUESTS;
2529 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2530 CAM_STREAM_TYPE_VIDEO) {
2531 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2532 bufferCount = MAX_VIDEO_BUFFERS;
2533 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002534 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2535 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002536 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002537 this,
2538 newStream,
2539 (cam_stream_type_t)
2540 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2541 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2542 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002543 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002544 if (channel == NULL) {
2545 LOGE("allocation of channel failed");
2546 pthread_mutex_unlock(&mMutex);
2547 return -ENOMEM;
2548 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002549 /* disable UBWC for preview, though supported,
2550 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002551 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002552 (previewSize.width == (int32_t)videoWidth)&&
2553 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002554 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002555 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002556 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002557 /* When goog_zoom is linked to the preview or video stream,
2558                     * disable UBWC on the linked stream */
2559 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2560 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2561 channel->setUBWCEnabled(false);
2562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 newStream->max_buffers = channel->getNumBuffers();
2564 newStream->priv = channel;
2565 }
2566 break;
2567 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2568 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2569 mChannelHandle,
2570 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002571 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002572 this,
2573 newStream,
2574 (cam_stream_type_t)
2575 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2576 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2577 mMetadataChannel);
2578 if (channel == NULL) {
2579 LOGE("allocation of YUV channel failed");
2580 pthread_mutex_unlock(&mMutex);
2581 return -ENOMEM;
2582 }
2583 newStream->max_buffers = channel->getNumBuffers();
2584 newStream->priv = channel;
2585 break;
2586 }
2587 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2588 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002589 case HAL_PIXEL_FORMAT_RAW10: {
2590 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2591 (HAL_DATASPACE_DEPTH != newStream->data_space))
2592 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002593 mRawChannel = new QCamera3RawChannel(
2594 mCameraHandle->camera_handle, mChannelHandle,
2595 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002596 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002597 this, newStream,
2598 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002599 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 if (mRawChannel == NULL) {
2601 LOGE("allocation of raw channel failed");
2602 pthread_mutex_unlock(&mMutex);
2603 return -ENOMEM;
2604 }
2605 newStream->max_buffers = mRawChannel->getNumBuffers();
2606 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2607 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002609 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002610 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2611 mDepthChannel = new QCamera3DepthChannel(
2612 mCameraHandle->camera_handle, mChannelHandle,
2613 mCameraHandle->ops, NULL, NULL, &padding_info,
2614 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2615 mMetadataChannel);
2616 if (NULL == mDepthChannel) {
2617 LOGE("Allocation of depth channel failed");
2618 pthread_mutex_unlock(&mMutex);
2619 return NO_MEMORY;
2620 }
2621 newStream->priv = mDepthChannel;
2622 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2623 } else {
2624 // Max live snapshot inflight buffer is 1. This is to mitigate
2625 // frame drop issues for video snapshot. The more buffers being
2626 // allocated, the more frame drops there are.
2627 mPictureChannel = new QCamera3PicChannel(
2628 mCameraHandle->camera_handle, mChannelHandle,
2629 mCameraHandle->ops, captureResultCb,
2630 setBufferErrorStatus, &padding_info, this, newStream,
2631 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2632 m_bIs4KVideo, isZsl, mMetadataChannel,
2633 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2634 if (mPictureChannel == NULL) {
2635 LOGE("allocation of channel failed");
2636 pthread_mutex_unlock(&mMutex);
2637 return -ENOMEM;
2638 }
2639 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2640 newStream->max_buffers = mPictureChannel->getNumBuffers();
2641 mPictureChannel->overrideYuvSize(
2642 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2643 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002645 break;
2646
2647 default:
2648 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002649 pthread_mutex_unlock(&mMutex);
2650 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002651 }
2652 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2653 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2654 } else {
2655 LOGE("Error, Unknown stream type");
2656 pthread_mutex_unlock(&mMutex);
2657 return -EINVAL;
2658 }
2659
2660 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
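            // If the backend's default format for this stream type resolves to the
            // UBWC variant, add the private UBWC gralloc usage flag so that buffers
            // for this stream are allocated with the compressed UBWC layout.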
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002661 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2662 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002663 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002664 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002665 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2666 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2667 }
2668 }
2669
2670 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2671 it != mStreamInfo.end(); it++) {
2672 if ((*it)->stream == newStream) {
2673 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2674 break;
2675 }
2676 }
2677 } else {
2678 // Channel already exists for this stream
2679 // Do nothing for now
2680 }
2681 padding_info = gCamCapability[mCameraId]->padding_info;
2682
Emilian Peev7650c122017-01-19 08:24:33 -08002683        /* Do not add entries for the input and depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002684         * since there is no real backend stream associated with them.
2685 */
Emilian Peev7650c122017-01-19 08:24:33 -08002686 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002687 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2688 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002689 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002690 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002691 }
2692
Binhao Lincdb362a2017-04-20 13:31:54 -07002693    // By default, TNR is disabled for the preview stream.
2694    // Enable TNR on the preview stream only if all of the conditions below hold:
2695    // 1. video resolution <= 1080p.
2696    // 2. preview resolution == video resolution.
2697    // 3. video stream TNR is enabled.
2698    // 4. EIS 2.0 is selected (see the illustrative note below).
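    // Illustrative example (hypothetical sizes): a 1920x1080 preview paired with
    // a 1920x1080 video stream, with video TNR enabled and EIS 2.0 selected, gets
    // CAM_QCOM_FEATURE_CPP_TNR OR'ed into its postprocess mask below (and CDS
    // cleared); a 3840x2160 video stream, or a preview whose size differs from the
    // video size, leaves the preview mask unchanged.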
2699 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2700 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2701 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2702 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2703 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2704 video_stream->width == preview_stream->width &&
2705 video_stream->height == preview_stream->height) {
2706 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2707 CAM_QCOM_FEATURE_CPP_TNR;
2708 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2709 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2710 ~CAM_QCOM_FEATURE_CDS;
2711 }
2712 }
2713
Thierry Strudel2896d122017-02-23 19:18:03 -08002714 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2715 onlyRaw = false;
2716 }
2717
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002718 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002719 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002720 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002721 cam_analysis_info_t analysisInfo;
2722 int32_t ret = NO_ERROR;
2723 ret = mCommon.getAnalysisInfo(
2724 FALSE,
2725 analysisFeatureMask,
2726 &analysisInfo);
2727 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002728 cam_color_filter_arrangement_t analysis_color_arrangement =
2729 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2730 CAM_FILTER_ARRANGEMENT_Y :
2731 gCamCapability[mCameraId]->color_arrangement);
2732 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2733 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002734 cam_dimension_t analysisDim;
2735 analysisDim = mCommon.getMatchingDimension(previewSize,
2736 analysisInfo.analysis_recommended_res);
2737
2738 mAnalysisChannel = new QCamera3SupportChannel(
2739 mCameraHandle->camera_handle,
2740 mChannelHandle,
2741 mCameraHandle->ops,
2742 &analysisInfo.analysis_padding_info,
2743 analysisFeatureMask,
2744 CAM_STREAM_TYPE_ANALYSIS,
2745 &analysisDim,
2746 (analysisInfo.analysis_format
2747 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2748 : CAM_FORMAT_YUV_420_NV21),
2749 analysisInfo.hw_analysis_supported,
2750 gCamCapability[mCameraId]->color_arrangement,
2751 this,
2752 0); // force buffer count to 0
2753 } else {
2754 LOGW("getAnalysisInfo failed, ret = %d", ret);
2755 }
2756 if (!mAnalysisChannel) {
2757 LOGW("Analysis channel cannot be created");
2758 }
2759 }
2760
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 //RAW DUMP channel
2762 if (mEnableRawDump && isRawStreamRequested == false){
2763 cam_dimension_t rawDumpSize;
2764 rawDumpSize = getMaxRawSize(mCameraId);
2765 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2766 setPAAFSupport(rawDumpFeatureMask,
2767 CAM_STREAM_TYPE_RAW,
2768 gCamCapability[mCameraId]->color_arrangement);
2769 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2770 mChannelHandle,
2771 mCameraHandle->ops,
2772 rawDumpSize,
2773 &padding_info,
2774 this, rawDumpFeatureMask);
2775 if (!mRawDumpChannel) {
2776 LOGE("Raw Dump channel cannot be created");
2777 pthread_mutex_unlock(&mMutex);
2778 return -ENOMEM;
2779 }
2780 }
2781
Thierry Strudel3d639192016-09-09 11:52:26 -07002782 if (mAnalysisChannel) {
2783 cam_analysis_info_t analysisInfo;
2784 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2785 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2786 CAM_STREAM_TYPE_ANALYSIS;
2787 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2788 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002789 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002790 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2791 &analysisInfo);
2792 if (rc != NO_ERROR) {
2793 LOGE("getAnalysisInfo failed, ret = %d", rc);
2794 pthread_mutex_unlock(&mMutex);
2795 return rc;
2796 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002797 cam_color_filter_arrangement_t analysis_color_arrangement =
2798 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2799 CAM_FILTER_ARRANGEMENT_Y :
2800 gCamCapability[mCameraId]->color_arrangement);
2801 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2803 analysis_color_arrangement);
2804
Thierry Strudel3d639192016-09-09 11:52:26 -07002805 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002806 mCommon.getMatchingDimension(previewSize,
2807 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002808 mStreamConfigInfo.num_streams++;
2809 }
2810
Thierry Strudel2896d122017-02-23 19:18:03 -08002811 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002812 cam_analysis_info_t supportInfo;
2813 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2814 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2815 setPAAFSupport(callbackFeatureMask,
2816 CAM_STREAM_TYPE_CALLBACK,
2817 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002818 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002819 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002820 if (ret != NO_ERROR) {
2821 /* Ignore the error for Mono camera
2822 * because the PAAF bit mask is only set
2823 * for CAM_STREAM_TYPE_ANALYSIS stream type
2824 */
2825 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2826 LOGW("getAnalysisInfo failed, ret = %d", ret);
2827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 }
2829 mSupportChannel = new QCamera3SupportChannel(
2830 mCameraHandle->camera_handle,
2831 mChannelHandle,
2832 mCameraHandle->ops,
2833 &gCamCapability[mCameraId]->padding_info,
2834 callbackFeatureMask,
2835 CAM_STREAM_TYPE_CALLBACK,
2836 &QCamera3SupportChannel::kDim,
2837 CAM_FORMAT_YUV_420_NV21,
2838 supportInfo.hw_analysis_supported,
2839 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002840 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002841 if (!mSupportChannel) {
2842 LOGE("dummy channel cannot be created");
2843 pthread_mutex_unlock(&mMutex);
2844 return -ENOMEM;
2845 }
2846 }
2847
2848 if (mSupportChannel) {
2849 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2850 QCamera3SupportChannel::kDim;
2851 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2852 CAM_STREAM_TYPE_CALLBACK;
2853 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2854 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2855 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2856 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2857 gCamCapability[mCameraId]->color_arrangement);
2858 mStreamConfigInfo.num_streams++;
2859 }
2860
2861 if (mRawDumpChannel) {
2862 cam_dimension_t rawSize;
2863 rawSize = getMaxRawSize(mCameraId);
2864 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2865 rawSize;
2866 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2867 CAM_STREAM_TYPE_RAW;
2868 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2869 CAM_QCOM_FEATURE_NONE;
2870 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2871 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2872 gCamCapability[mCameraId]->color_arrangement);
2873 mStreamConfigInfo.num_streams++;
2874 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002875
2876 if (mHdrPlusRawSrcChannel) {
2877 cam_dimension_t rawSize;
2878 rawSize = getMaxRawSize(mCameraId);
2879 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2880 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2881 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2882 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2883 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2884 gCamCapability[mCameraId]->color_arrangement);
2885 mStreamConfigInfo.num_streams++;
2886 }
2887
Thierry Strudel3d639192016-09-09 11:52:26 -07002888    /* In HFR mode, if no video stream is added, create a dummy channel so that
2889     * the ISP can still set up batch mode even for the preview-only case. This
2890     * channel is never 'start'ed (no stream-on); it is only 'initialized'. */
2891 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2892 !m_bIsVideo) {
2893 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2894 setPAAFSupport(dummyFeatureMask,
2895 CAM_STREAM_TYPE_VIDEO,
2896 gCamCapability[mCameraId]->color_arrangement);
2897 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2898 mChannelHandle,
2899 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002900 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002901 this,
2902 &mDummyBatchStream,
2903 CAM_STREAM_TYPE_VIDEO,
2904 dummyFeatureMask,
2905 mMetadataChannel);
2906 if (NULL == mDummyBatchChannel) {
2907            LOGE("creation of mDummyBatchChannel failed. "
2908                    "Preview will use non-HFR sensor mode");
2909 }
2910 }
2911 if (mDummyBatchChannel) {
2912 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2913 mDummyBatchStream.width;
2914 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2915 mDummyBatchStream.height;
2916 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2917 CAM_STREAM_TYPE_VIDEO;
2918 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2919 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2920 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2921 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2922 gCamCapability[mCameraId]->color_arrangement);
2923 mStreamConfigInfo.num_streams++;
2924 }
2925
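    // max_buffers selection below: 4K video sessions use 0, EIS 3.0 sessions use
    // MAX_VIDEO_BUFFERS, and all other configurations use MAX_INFLIGHT_REQUESTS.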
2926 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2927 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002928 m_bIs4KVideo ? 0 :
2929 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002930
2931 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2932 for (pendingRequestIterator i = mPendingRequestsList.begin();
2933 i != mPendingRequestsList.end();) {
2934 i = erasePendingRequest(i);
2935 }
2936 mPendingFrameDropList.clear();
2937 // Initialize/Reset the pending buffers list
2938 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2939 req.mPendingBufferList.clear();
2940 }
2941 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2942
Thierry Strudel3d639192016-09-09 11:52:26 -07002943 mCurJpegMeta.clear();
2944 //Get min frame duration for this streams configuration
2945 deriveMinFrameDuration();
2946
Chien-Yu Chenee335912017-02-09 17:53:20 -08002947 mFirstPreviewIntentSeen = false;
2948
2949 // Disable HRD+ if it's enabled;
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002950 {
2951 Mutex::Autolock l(gHdrPlusClientLock);
2952 disableHdrPlusModeLocked();
2953 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002954
Thierry Strudel3d639192016-09-09 11:52:26 -07002955 // Update state
2956 mState = CONFIGURED;
2957
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002958 mFirstMetadataCallback = true;
2959
Thierry Strudel3d639192016-09-09 11:52:26 -07002960 pthread_mutex_unlock(&mMutex);
2961
2962 return rc;
2963}
2964
2965/*===========================================================================
2966 * FUNCTION : validateCaptureRequest
2967 *
2968 * DESCRIPTION: validate a capture request from camera service
2969 *
2970 * PARAMETERS :
2971 * @request : request from framework to process
2972 *
2973 * RETURN :
2974 *
2975 *==========================================================================*/
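// Informal example of a rejected request: a request whose output buffer carries a
// release fence (release_fence != -1), references an unconfigured stream, or has a
// NULL buffer handle is rejected with BAD_VALUE before anything is queued.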
2976int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002977 camera3_capture_request_t *request,
2978 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002979{
2980 ssize_t idx = 0;
2981 const camera3_stream_buffer_t *b;
2982 CameraMetadata meta;
2983
2984 /* Sanity check the request */
2985 if (request == NULL) {
2986 LOGE("NULL capture request");
2987 return BAD_VALUE;
2988 }
2989
2990 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2991 /*settings cannot be null for the first request*/
2992 return BAD_VALUE;
2993 }
2994
2995 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002996 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2997 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002998        LOGE("Request %d: No output buffers provided!",
2999                frameNumber);
3000 return BAD_VALUE;
3001 }
3002 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3003        LOGE("Number of buffers %d equals or exceeds maximum number of streams %d!",
3004                request->num_output_buffers, MAX_NUM_STREAMS);
3005 return BAD_VALUE;
3006 }
3007 if (request->input_buffer != NULL) {
3008 b = request->input_buffer;
3009 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3010 LOGE("Request %d: Buffer %ld: Status not OK!",
3011 frameNumber, (long)idx);
3012 return BAD_VALUE;
3013 }
3014 if (b->release_fence != -1) {
3015 LOGE("Request %d: Buffer %ld: Has a release fence!",
3016 frameNumber, (long)idx);
3017 return BAD_VALUE;
3018 }
3019 if (b->buffer == NULL) {
3020 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3021 frameNumber, (long)idx);
3022 return BAD_VALUE;
3023 }
3024 }
3025
3026 // Validate all buffers
3027 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003028 if (b == NULL) {
3029 return BAD_VALUE;
3030 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003031 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003032 QCamera3ProcessingChannel *channel =
3033 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3034 if (channel == NULL) {
3035 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3036 frameNumber, (long)idx);
3037 return BAD_VALUE;
3038 }
3039 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3040 LOGE("Request %d: Buffer %ld: Status not OK!",
3041 frameNumber, (long)idx);
3042 return BAD_VALUE;
3043 }
3044 if (b->release_fence != -1) {
3045 LOGE("Request %d: Buffer %ld: Has a release fence!",
3046 frameNumber, (long)idx);
3047 return BAD_VALUE;
3048 }
3049 if (b->buffer == NULL) {
3050 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3051 frameNumber, (long)idx);
3052 return BAD_VALUE;
3053 }
3054 if (*(b->buffer) == NULL) {
3055 LOGE("Request %d: Buffer %ld: NULL private handle!",
3056 frameNumber, (long)idx);
3057 return BAD_VALUE;
3058 }
3059 idx++;
3060 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003061 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003062 return NO_ERROR;
3063}
3064
3065/*===========================================================================
3066 * FUNCTION : deriveMinFrameDuration
3067 *
3068 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3069 * on currently configured streams.
3070 *
3071 * PARAMETERS : NONE
3072 *
3073 * RETURN : NONE
3074 *
3075 *==========================================================================*/
3076void QCamera3HardwareInterface::deriveMinFrameDuration()
3077{
3078 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3079
3080 maxJpegDim = 0;
3081 maxProcessedDim = 0;
3082 maxRawDim = 0;
3083
3084 // Figure out maximum jpeg, processed, and raw dimensions
3085 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3086 it != mStreamInfo.end(); it++) {
3087
3088 // Input stream doesn't have valid stream_type
3089        // Skip input streams; they don't factor into the output frame durations
3090 continue;
3091
3092 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3093 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3094 if (dimension > maxJpegDim)
3095 maxJpegDim = dimension;
3096 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3097 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3098 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3099 if (dimension > maxRawDim)
3100 maxRawDim = dimension;
3101 } else {
3102 if (dimension > maxProcessedDim)
3103 maxProcessedDim = dimension;
3104 }
3105 }
3106
3107 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3108 MAX_SIZES_CNT);
3109
3110 //Assume all jpeg dimensions are in processed dimensions.
3111 if (maxJpegDim > maxProcessedDim)
3112 maxProcessedDim = maxJpegDim;
3113    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
3114 if (maxProcessedDim > maxRawDim) {
3115 maxRawDim = INT32_MAX;
3116
3117 for (size_t i = 0; i < count; i++) {
3118 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3119 gCamCapability[mCameraId]->raw_dim[i].height;
3120 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3121 maxRawDim = dimension;
3122 }
3123 }
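    // Worked example (hypothetical sensor tables): with a 4000x3000 (12 MP) JPEG
    // stream and raw sizes {4208x3120, 2104x1560}, maxProcessedDim becomes
    // 12000000 and the loop above selects 4208x3120 (13128960), the smallest raw
    // dimension that still covers the processed dimension.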
3124
3125 //Find minimum durations for processed, jpeg, and raw
3126 for (size_t i = 0; i < count; i++) {
3127 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3128 gCamCapability[mCameraId]->raw_dim[i].height) {
3129 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3130 break;
3131 }
3132 }
3133 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3134 for (size_t i = 0; i < count; i++) {
3135 if (maxProcessedDim ==
3136 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3137 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3138 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3139 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3140 break;
3141 }
3142 }
3143}
3144
3145/*===========================================================================
3146 * FUNCTION : getMinFrameDuration
3147 *
3148 * DESCRIPTION: get minimum frame duration based on the per-stream minimum frame durations
3149 * and current request configuration.
3150 *
3151 * PARAMETERS : @request: request sent by the framework
3152 *
3153 * RETURN     : min frame duration for a particular request
3154 *
3155 *==========================================================================*/
3156int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3157{
3158 bool hasJpegStream = false;
3159 bool hasRawStream = false;
3160 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3161 const camera3_stream_t *stream = request->output_buffers[i].stream;
3162 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3163 hasJpegStream = true;
3164 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3165 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3166 stream->format == HAL_PIXEL_FORMAT_RAW16)
3167 hasRawStream = true;
3168 }
3169
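    // Selection sketch (hypothetical durations): with mMinProcessedFrameDuration =
    // 33333333 ns, mMinRawFrameDuration = 50000000 ns and mMinJpegFrameDuration =
    // 66666666 ns, a request without a BLOB buffer returns 50000000 ns below, while
    // a request that adds a BLOB (JPEG) buffer returns 66666666 ns.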
3170 if (!hasJpegStream)
3171 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3172 else
3173 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3174}
3175
3176/*===========================================================================
3177 * FUNCTION : handleBuffersDuringFlushLock
3178 *
3179 * DESCRIPTION: Account for buffers returned from back-end during flush
3180 * This function is executed while mMutex is held by the caller.
3181 *
3182 * PARAMETERS :
3183 * @buffer: image buffer for the callback
3184 *
3185 * RETURN :
3186 *==========================================================================*/
3187void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3188{
3189 bool buffer_found = false;
3190 for (List<PendingBuffersInRequest>::iterator req =
3191 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3192 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3193 for (List<PendingBufferInfo>::iterator i =
3194 req->mPendingBufferList.begin();
3195 i != req->mPendingBufferList.end(); i++) {
3196 if (i->buffer == buffer->buffer) {
3197 mPendingBuffersMap.numPendingBufsAtFlush--;
3198 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3199 buffer->buffer, req->frame_number,
3200 mPendingBuffersMap.numPendingBufsAtFlush);
3201 buffer_found = true;
3202 break;
3203 }
3204 }
3205 if (buffer_found) {
3206 break;
3207 }
3208 }
3209 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3210 //signal the flush()
3211 LOGD("All buffers returned to HAL. Continue flush");
3212 pthread_cond_signal(&mBuffersCond);
3213 }
3214}
3215
Thierry Strudel3d639192016-09-09 11:52:26 -07003216/*===========================================================================
3217 * FUNCTION : handleBatchMetadata
3218 *
3219 * DESCRIPTION: Handles metadata buffer callback in batch mode
3220 *
3221 * PARAMETERS : @metadata_buf: metadata buffer
3222 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3223 * the meta buf in this method
3224 *
3225 * RETURN :
3226 *
3227 *==========================================================================*/
3228void QCamera3HardwareInterface::handleBatchMetadata(
3229 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3230{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003231 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003232
3233 if (NULL == metadata_buf) {
3234 LOGE("metadata_buf is NULL");
3235 return;
3236 }
3237    /* In batch mode, the metadata will contain the frame number and timestamp of
3238 * the last frame in the batch. Eg: a batch containing buffers from request
3239 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3240     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3241 * multiple process_capture_results */
3242 metadata_buffer_t *metadata =
3243 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3244 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3245 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3246 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3247 uint32_t frame_number = 0, urgent_frame_number = 0;
3248 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3249 bool invalid_metadata = false;
3250 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3251 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003252 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003253
3254 int32_t *p_frame_number_valid =
3255 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3256 uint32_t *p_frame_number =
3257 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3258 int64_t *p_capture_time =
3259 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3260 int32_t *p_urgent_frame_number_valid =
3261 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3262 uint32_t *p_urgent_frame_number =
3263 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3264
3265 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3266 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3267 (NULL == p_urgent_frame_number)) {
3268 LOGE("Invalid metadata");
3269 invalid_metadata = true;
3270 } else {
3271 frame_number_valid = *p_frame_number_valid;
3272 last_frame_number = *p_frame_number;
3273 last_frame_capture_time = *p_capture_time;
3274 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3275 last_urgent_frame_number = *p_urgent_frame_number;
3276 }
3277
3278    /* In batch mode, when no video buffers are requested, set_parms are sent
3279 * for every capture_request. The difference between consecutive urgent
3280 * frame numbers and frame numbers should be used to interpolate the
3281 * corresponding frame numbers and time stamps */
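    /* Interpolation sketch (hypothetical values): with first_frame_number = 5,
     * last_frame_number = 8 and mHFRVideoFps = 120, frameNumDiff = 4 and the loop
     * below replays the metadata four times, tagging frames 5..8 with timestamps
     * spaced NSEC_PER_SEC / 120 (~8.3 ms) apart, ending at the batch's reported
     * capture time. */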
3282 pthread_mutex_lock(&mMutex);
3283 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003284 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3285 if(idx < 0) {
3286 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3287 last_urgent_frame_number);
3288 mState = ERROR;
3289 pthread_mutex_unlock(&mMutex);
3290 return;
3291 }
3292 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003293 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3294 first_urgent_frame_number;
3295
3296 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3297 urgent_frame_number_valid,
3298 first_urgent_frame_number, last_urgent_frame_number);
3299 }
3300
3301 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003302 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3303 if(idx < 0) {
3304 LOGE("Invalid frame number received: %d. Irrecoverable error",
3305 last_frame_number);
3306 mState = ERROR;
3307 pthread_mutex_unlock(&mMutex);
3308 return;
3309 }
3310 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003311 frameNumDiff = last_frame_number + 1 -
3312 first_frame_number;
3313 mPendingBatchMap.removeItem(last_frame_number);
3314
3315 LOGD("frm: valid: %d frm_num: %d - %d",
3316 frame_number_valid,
3317 first_frame_number, last_frame_number);
3318
3319 }
3320 pthread_mutex_unlock(&mMutex);
3321
3322 if (urgent_frame_number_valid || frame_number_valid) {
3323 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3324 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3325 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3326 urgentFrameNumDiff, last_urgent_frame_number);
3327 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3328 LOGE("frameNumDiff: %d frameNum: %d",
3329 frameNumDiff, last_frame_number);
3330 }
3331
3332 for (size_t i = 0; i < loopCount; i++) {
3333 /* handleMetadataWithLock is called even for invalid_metadata for
3334 * pipeline depth calculation */
3335 if (!invalid_metadata) {
3336 /* Infer frame number. Batch metadata contains frame number of the
3337 * last frame */
3338 if (urgent_frame_number_valid) {
3339 if (i < urgentFrameNumDiff) {
3340 urgent_frame_number =
3341 first_urgent_frame_number + i;
3342 LOGD("inferred urgent frame_number: %d",
3343 urgent_frame_number);
3344 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3345 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3346 } else {
3347 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3348 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3349 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3350 }
3351 }
3352
3353 /* Infer frame number. Batch metadata contains frame number of the
3354 * last frame */
3355 if (frame_number_valid) {
3356 if (i < frameNumDiff) {
3357 frame_number = first_frame_number + i;
3358 LOGD("inferred frame_number: %d", frame_number);
3359 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3360 CAM_INTF_META_FRAME_NUMBER, frame_number);
3361 } else {
3362 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3363 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3364 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3365 }
3366 }
3367
3368 if (last_frame_capture_time) {
3369 //Infer timestamp
3370 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003371 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003372 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003373 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003374 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3375 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3376 LOGD("batch capture_time: %lld, capture_time: %lld",
3377 last_frame_capture_time, capture_time);
3378 }
3379 }
3380 pthread_mutex_lock(&mMutex);
3381 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003382 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003383 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3384 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003385                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003386 pthread_mutex_unlock(&mMutex);
3387 }
3388
3389 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003390 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003391 mMetadataChannel->bufDone(metadata_buf);
3392 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003393 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003394 }
3395}
3396
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003397void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3398 camera3_error_msg_code_t errorCode)
3399{
3400 camera3_notify_msg_t notify_msg;
3401 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3402 notify_msg.type = CAMERA3_MSG_ERROR;
3403 notify_msg.message.error.error_code = errorCode;
3404 notify_msg.message.error.error_stream = NULL;
3405 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003406 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003407
3408 return;
3409}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003410
3411/*===========================================================================
3412 * FUNCTION : sendPartialMetadataWithLock
3413 *
3414 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3415 *
3416 * PARAMETERS : @metadata: metadata buffer
3417 * @requestIter: The iterator for the pending capture request for
3418 *              which the partial result is being sent
3419 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3420 * last urgent metadata in a batch. Always true for non-batch mode
3421 *
3422 * RETURN :
3423 *
3424 *==========================================================================*/
3425
3426void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3427 metadata_buffer_t *metadata,
3428 const pendingRequestIterator requestIter,
3429 bool lastUrgentMetadataInBatch)
3430{
3431 camera3_capture_result_t result;
3432 memset(&result, 0, sizeof(camera3_capture_result_t));
3433
3434 requestIter->partial_result_cnt++;
3435
3436 // Extract 3A metadata
3437 result.result = translateCbUrgentMetadataToResultMetadata(
3438 metadata, lastUrgentMetadataInBatch);
3439 // Populate metadata result
3440 result.frame_number = requestIter->frame_number;
3441 result.num_output_buffers = 0;
3442 result.output_buffers = NULL;
3443 result.partial_result = requestIter->partial_result_cnt;
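    // The urgent (3A) delivery typically carries partial_result == 1; the HDR+
    // client check below compares against PARTIAL_RESULT_COUNT to tell whether
    // this is the final partial result for the frame.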
3444
3445 {
3446 Mutex::Autolock l(gHdrPlusClientLock);
3447 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3448 // Notify HDR+ client about the partial metadata.
3449 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3450 result.partial_result == PARTIAL_RESULT_COUNT);
3451 }
3452 }
3453
3454 orchestrateResult(&result);
3455 LOGD("urgent frame_number = %u", result.frame_number);
3456 free_camera_metadata((camera_metadata_t *)result.result);
3457}
3458
Thierry Strudel3d639192016-09-09 11:52:26 -07003459/*===========================================================================
3460 * FUNCTION : handleMetadataWithLock
3461 *
3462 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3463 *
3464 * PARAMETERS : @metadata_buf: metadata buffer
3465 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3466 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003467 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3468 * last urgent metadata in a batch. Always true for non-batch mode
3469 * @lastMetadataInBatch: Boolean to indicate whether this is the
3470 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003471 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3472 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003473 *
3474 * RETURN :
3475 *
3476 *==========================================================================*/
3477void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003478 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003479 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3480 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003481{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003482 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003483 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3484 //during flush do not send metadata from this thread
3485 LOGD("not sending metadata during flush or when mState is error");
3486 if (free_and_bufdone_meta_buf) {
3487 mMetadataChannel->bufDone(metadata_buf);
3488 free(metadata_buf);
3489 }
3490 return;
3491 }
3492
3493 //not in flush
3494 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3495 int32_t frame_number_valid, urgent_frame_number_valid;
3496 uint32_t frame_number, urgent_frame_number;
3497 int64_t capture_time;
3498 nsecs_t currentSysTime;
3499
3500 int32_t *p_frame_number_valid =
3501 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3502 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3503 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3504 int32_t *p_urgent_frame_number_valid =
3505 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3506 uint32_t *p_urgent_frame_number =
3507 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3508 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3509 metadata) {
3510 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3511 *p_frame_number_valid, *p_frame_number);
3512 }
3513
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003514 camera_metadata_t *resultMetadata = nullptr;
3515
Thierry Strudel3d639192016-09-09 11:52:26 -07003516 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3517 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3518 LOGE("Invalid metadata");
3519 if (free_and_bufdone_meta_buf) {
3520 mMetadataChannel->bufDone(metadata_buf);
3521 free(metadata_buf);
3522 }
3523 goto done_metadata;
3524 }
3525 frame_number_valid = *p_frame_number_valid;
3526 frame_number = *p_frame_number;
3527 capture_time = *p_capture_time;
3528 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3529 urgent_frame_number = *p_urgent_frame_number;
3530 currentSysTime = systemTime(CLOCK_MONOTONIC);
3531
3532 // Detect if buffers from any requests are overdue
3533 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003534 int64_t timeout;
3535 {
3536 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3537 // If there is a pending HDR+ request, the following requests may be blocked until the
3538 // HDR+ request is done. So allow a longer timeout.
3539 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3540 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3541 }
3542
3543 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003544 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003545 assert(missed.stream->priv);
3546 if (missed.stream->priv) {
3547 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3548 assert(ch->mStreams[0]);
3549 if (ch->mStreams[0]) {
3550 LOGE("Cancel missing frame = %d, buffer = %p,"
3551 "stream type = %d, stream format = %d",
3552 req.frame_number, missed.buffer,
3553 ch->mStreams[0]->getMyType(), missed.stream->format);
3554 ch->timeoutFrame(req.frame_number);
3555 }
3556 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003557 }
3558 }
3559 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003560    //For the very first metadata callback, regardless of whether it contains a
3561    //valid frame number, send the partial metadata for the jumpstarting requests.
3562    //Note that this has to be done even if the metadata doesn't contain a valid
3563    //urgent frame number, because when only one request is ever submitted to the
3564    //HAL, there won't be a subsequent valid urgent frame number.
3565 if (mFirstMetadataCallback) {
3566 for (pendingRequestIterator i =
3567 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3568 if (i->bUseFirstPartial) {
3569 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3570 }
3571 }
3572 mFirstMetadataCallback = false;
3573 }
3574
Thierry Strudel3d639192016-09-09 11:52:26 -07003575 //Partial result on process_capture_result for timestamp
3576 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003577 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003578
3579        //Received an urgent frame number, handle it
3580 //using partial results
3581 for (pendingRequestIterator i =
3582 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3583 LOGD("Iterator Frame = %d urgent frame = %d",
3584 i->frame_number, urgent_frame_number);
3585
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003586 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003587 (i->partial_result_cnt == 0)) {
3588 LOGE("Error: HAL missed urgent metadata for frame number %d",
3589 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003590 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003591 }
3592
3593 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003594 i->partial_result_cnt == 0) {
3595 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003596 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3597 // Instant AEC settled for this frame.
3598 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3599 mInstantAECSettledFrameNumber = urgent_frame_number;
3600 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 break;
3602 }
3603 }
3604 }
3605
3606 if (!frame_number_valid) {
3607 LOGD("Not a valid normal frame number, used as SOF only");
3608 if (free_and_bufdone_meta_buf) {
3609 mMetadataChannel->bufDone(metadata_buf);
3610 free(metadata_buf);
3611 }
3612 goto done_metadata;
3613 }
3614 LOGH("valid frame_number = %u, capture_time = %lld",
3615 frame_number, capture_time);
3616
Emilian Peev7650c122017-01-19 08:24:33 -08003617 if (metadata->is_depth_data_valid) {
3618 handleDepthDataLocked(metadata->depth_data, frame_number);
3619 }
3620
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003621 // Check whether any stream buffer corresponding to this is dropped or not
3622 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3623    // OR, if instant AEC is enabled, drop frames until AEC has settled.
3624 for (auto & pendingRequest : mPendingRequestsList) {
3625 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3626 mInstantAECSettledFrameNumber)) {
3627 camera3_notify_msg_t notify_msg = {};
3628 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003629 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003630 QCamera3ProcessingChannel *channel =
3631 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003632 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003633 if (p_cam_frame_drop) {
3634 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003635 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003636 // Got the stream ID for drop frame.
3637 dropFrame = true;
3638 break;
3639 }
3640 }
3641 } else {
3642 // This is instant AEC case.
3643                // This is the instant AEC case.
3644                // For instant AEC, drop the stream until AEC has settled.
3645 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003646
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003647 if (dropFrame) {
3648 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3649 if (p_cam_frame_drop) {
3650 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003651 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003652 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003653 } else {
3654 // For instant AEC, inform frame drop and frame number
3655 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3656 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003657 pendingRequest.frame_number, streamID,
3658 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003659 }
3660 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003662 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003663 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003664 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003665 if (p_cam_frame_drop) {
3666 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003667 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003669 } else {
3670 // For instant AEC, inform frame drop and frame number
3671 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3672 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003673 pendingRequest.frame_number, streamID,
3674 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003675 }
3676 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003677 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003678 PendingFrameDrop.stream_ID = streamID;
3679 // Add the Frame drop info to mPendingFrameDropList
3680 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003681 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003682 }
3683 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003684 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003685
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003686 for (auto & pendingRequest : mPendingRequestsList) {
3687 // Find the pending request with the frame number.
3688 if (pendingRequest.frame_number == frame_number) {
3689 // Update the sensor timestamp.
3690 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003691
Thierry Strudel3d639192016-09-09 11:52:26 -07003692
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003693            /* Set the timestamp in the display metadata so that clients aware of
3694               private_handle, such as VT, can use this unmodified timestamp.
3695               The camera framework is unaware of this timestamp and cannot change it. */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003696 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003697
Thierry Strudel3d639192016-09-09 11:52:26 -07003698 // Find channel requiring metadata, meaning internal offline postprocess
3699 // is needed.
3700 //TODO: for now, we don't support two streams requiring metadata at the same time.
3701 // (because we are not making copies, and metadata buffer is not reference counted.
3702            // (because we are not making copies, and the metadata buffer is not reference counted).
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003703 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3704 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003705 if (iter->need_metadata) {
3706 internalPproc = true;
3707 QCamera3ProcessingChannel *channel =
3708 (QCamera3ProcessingChannel *)iter->stream->priv;
3709 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003710 if(p_is_metabuf_queued != NULL) {
3711 *p_is_metabuf_queued = true;
3712 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003713 break;
3714 }
3715 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003716 for (auto itr = pendingRequest.internalRequestList.begin();
3717 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003718 if (itr->need_metadata) {
3719 internalPproc = true;
3720 QCamera3ProcessingChannel *channel =
3721 (QCamera3ProcessingChannel *)itr->stream->priv;
3722 channel->queueReprocMetadata(metadata_buf);
3723 break;
3724 }
3725 }
3726
Thierry Strudel54dc9782017-02-15 12:12:10 -08003727 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003728
3729 bool *enableZsl = nullptr;
3730 if (gExposeEnableZslKey) {
3731 enableZsl = &pendingRequest.enableZsl;
3732 }
3733
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003734 resultMetadata = translateFromHalMetadata(metadata,
3735 pendingRequest.timestamp, pendingRequest.request_id,
3736 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3737 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003738 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003739 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003740 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003741 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003742 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003743 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003744
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003745 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003746
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003747 if (pendingRequest.blob_request) {
3748 //Dump tuning metadata if enabled and available
3749 char prop[PROPERTY_VALUE_MAX];
3750 memset(prop, 0, sizeof(prop));
3751 property_get("persist.camera.dumpmetadata", prop, "0");
3752 int32_t enabled = atoi(prop);
3753 if (enabled && metadata->is_tuning_params_valid) {
3754 dumpMetadataToFile(metadata->tuning_params,
3755 mMetaFrameCount,
3756 enabled,
3757 "Snapshot",
3758 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003759 }
3760 }
3761
3762 if (!internalPproc) {
3763 LOGD("couldn't find need_metadata for this metadata");
3764 // Return metadata buffer
3765 if (free_and_bufdone_meta_buf) {
3766 mMetadataChannel->bufDone(metadata_buf);
3767 free(metadata_buf);
3768 }
3769 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003772 }
3773 }
3774
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003775 // Try to send out shutter callbacks and capture results.
3776 handlePendingResultsWithLock(frame_number, resultMetadata);
3777 return;
3778
Thierry Strudel3d639192016-09-09 11:52:26 -07003779done_metadata:
3780 for (pendingRequestIterator i = mPendingRequestsList.begin();
3781 i != mPendingRequestsList.end() ;i++) {
3782 i->pipeline_depth++;
3783 }
3784 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3785 unblockRequestIfNecessary();
3786}
3787
3788/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003789 * FUNCTION   : handleDepthDataLocked
3790 *
3791 * DESCRIPTION: Handles incoming depth data
3792 *
3793 * PARAMETERS : @depthData : Depth data
3794 * @frameNumber: Frame number of the incoming depth data
3795 *
3796 * RETURN :
3797 *
3798 *==========================================================================*/
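/* Ordering sketch (hypothetical frame numbers): if depth buffers for frames 10, 11
 * and 12 are still mapped and depth data arrives for frame 12, frames 10 and 11 are
 * returned with CAMERA3_MSG_ERROR_BUFFER notifications and error status, while
 * frame 12 is populated with the incoming depth data and returned with OK status. */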
3799void QCamera3HardwareInterface::handleDepthDataLocked(
3800 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3801 uint32_t currentFrameNumber;
3802 buffer_handle_t *depthBuffer;
3803
3804 if (nullptr == mDepthChannel) {
3805 LOGE("Depth channel not present!");
3806 return;
3807 }
3808
3809 camera3_stream_buffer_t resultBuffer =
3810 {.acquire_fence = -1,
3811 .release_fence = -1,
3812 .status = CAMERA3_BUFFER_STATUS_OK,
3813 .buffer = nullptr,
3814 .stream = mDepthChannel->getStream()};
3815 camera3_capture_result_t result =
3816 {.result = nullptr,
3817 .num_output_buffers = 1,
3818 .output_buffers = &resultBuffer,
3819 .partial_result = 0,
3820 .frame_number = 0};
3821
3822 do {
3823 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3824 if (nullptr == depthBuffer) {
3825 break;
3826 }
3827
3828 result.frame_number = currentFrameNumber;
3829 resultBuffer.buffer = depthBuffer;
3830 if (currentFrameNumber == frameNumber) {
3831 int32_t rc = mDepthChannel->populateDepthData(depthData,
3832 frameNumber);
3833 if (NO_ERROR != rc) {
3834 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3835 } else {
3836 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3837 }
3838 } else if (currentFrameNumber > frameNumber) {
3839 break;
3840 } else {
3841 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3842 {{currentFrameNumber, mDepthChannel->getStream(),
3843 CAMERA3_MSG_ERROR_BUFFER}}};
3844 orchestrateNotify(&notify_msg);
3845
3846            LOGE("Depth buffer for frame number: %d is missing, "
3847                    "returning it with error status!", currentFrameNumber);
3848 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3849 }
3850 mDepthChannel->unmapBuffer(currentFrameNumber);
3851
3852 orchestrateResult(&result);
3853 } while (currentFrameNumber < frameNumber);
3854}
3855
3856/*===========================================================================
3857 * FUNCTION : notifyErrorFoPendingDepthData
3858 *
3859 * DESCRIPTION: Returns error for any pending depth buffers
3860 *
3861 * PARAMETERS : depthCh - depth channel that needs to get flushed
3862 *
3863 * RETURN :
3864 *
3865 *==========================================================================*/
3866void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3867 QCamera3DepthChannel *depthCh) {
3868 uint32_t currentFrameNumber;
3869 buffer_handle_t *depthBuffer;
3870
3871 if (nullptr == depthCh) {
3872 return;
3873 }
3874
3875 camera3_notify_msg_t notify_msg =
3876 {.type = CAMERA3_MSG_ERROR,
3877 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3878 camera3_stream_buffer_t resultBuffer =
3879 {.acquire_fence = -1,
3880 .release_fence = -1,
3881 .buffer = nullptr,
3882 .stream = depthCh->getStream(),
3883 .status = CAMERA3_BUFFER_STATUS_ERROR};
3884 camera3_capture_result_t result =
3885 {.result = nullptr,
3886 .frame_number = 0,
3887 .num_output_buffers = 1,
3888 .partial_result = 0,
3889 .output_buffers = &resultBuffer};
3890
3891 while (nullptr !=
3892 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3893 depthCh->unmapBuffer(currentFrameNumber);
3894
3895 notify_msg.message.error.frame_number = currentFrameNumber;
3896 orchestrateNotify(&notify_msg);
3897
3898 resultBuffer.buffer = depthBuffer;
3899 result.frame_number = currentFrameNumber;
3900 orchestrateResult(&result);
3901 };
3902}
3903
3904/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003905 * FUNCTION : hdrPlusPerfLock
3906 *
3907 * DESCRIPTION: perf lock for HDR+ using custom intent
3908 *
3909 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3910 *
3911 * RETURN : None
3912 *
3913 *==========================================================================*/
3914void QCamera3HardwareInterface::hdrPlusPerfLock(
3915 mm_camera_super_buf_t *metadata_buf)
3916{
3917 if (NULL == metadata_buf) {
3918 LOGE("metadata_buf is NULL");
3919 return;
3920 }
3921 metadata_buffer_t *metadata =
3922 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3923 int32_t *p_frame_number_valid =
3924 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3925 uint32_t *p_frame_number =
3926 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3927
3928 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3929 LOGE("%s: Invalid metadata", __func__);
3930 return;
3931 }
3932
3933    //acquire perf lock for HDR_PLUS_PERF_TIME_OUT after the last HDR frame is captured
3934 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3935 if ((p_frame_number != NULL) &&
3936 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003937 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003938 }
3939 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003940}
3941
3942/*===========================================================================
3943 * FUNCTION : handleInputBufferWithLock
3944 *
3945 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3946 *
3947 * PARAMETERS : @frame_number: frame number of the input buffer
3948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3953{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003954 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003955 pendingRequestIterator i = mPendingRequestsList.begin();
3956 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3957 i++;
3958 }
3959 if (i != mPendingRequestsList.end() && i->input_buffer) {
3960 //found the right request
3961 if (!i->shutter_notified) {
3962 CameraMetadata settings;
3963 camera3_notify_msg_t notify_msg;
3964 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3965 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
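            // For a reprocess request, prefer the sensor timestamp carried in the request
            // settings; fall back to the current time if the settings or timestamp are missing.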
3966 if(i->settings) {
3967 settings = i->settings;
3968 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3969 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3970 } else {
3971 LOGE("No timestamp in input settings! Using current one.");
3972 }
3973 } else {
3974 LOGE("Input settings missing!");
3975 }
3976
3977 notify_msg.type = CAMERA3_MSG_SHUTTER;
3978 notify_msg.message.shutter.frame_number = frame_number;
3979 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003980 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003981 i->shutter_notified = true;
3982 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3983 i->frame_number, notify_msg.message.shutter.timestamp);
3984 }
3985
3986 if (i->input_buffer->release_fence != -1) {
3987 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3988 close(i->input_buffer->release_fence);
3989 if (rc != OK) {
3990 LOGE("input buffer sync wait failed %d", rc);
3991 }
3992 }
3993
3994 camera3_capture_result result;
3995 memset(&result, 0, sizeof(camera3_capture_result));
3996 result.frame_number = frame_number;
3997 result.result = i->settings;
3998 result.input_buffer = i->input_buffer;
3999 result.partial_result = PARTIAL_RESULT_COUNT;
4000
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004001 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004002 LOGD("Input request metadata and input buffer frame_number = %u",
4003 i->frame_number);
4004 i = erasePendingRequest(i);
4005 } else {
4006 LOGE("Could not find input request for frame number %d", frame_number);
4007 }
4008}
4009
4010/*===========================================================================
4011 * FUNCTION : handleBufferWithLock
4012 *
4013 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4014 *
4015 * PARAMETERS : @buffer: image buffer for the callback
4016 * @frame_number: frame number of the image buffer
4017 *
4018 * RETURN :
4019 *
4020 *==========================================================================*/
4021void QCamera3HardwareInterface::handleBufferWithLock(
4022 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4023{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004024 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004025
4026 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4027 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4028 }
4029
Thierry Strudel3d639192016-09-09 11:52:26 -07004030 /* Nothing to be done during error state */
4031 if ((ERROR == mState) || (DEINIT == mState)) {
4032 return;
4033 }
4034 if (mFlushPerf) {
4035 handleBuffersDuringFlushLock(buffer);
4036 return;
4037 }
4038 //not in flush
4039 // If the frame number doesn't exist in the pending request list,
4040 // send the buffer directly to the framework and update the pending buffers map.
4041 // Otherwise, book-keep the buffer against its pending request.
4042 pendingRequestIterator i = mPendingRequestsList.begin();
4043 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4044 i++;
4045 }
4046 if (i == mPendingRequestsList.end()) {
4047 // Sanity check: warn if any pending live request has a smaller frame number
4048 for (pendingRequestIterator j = mPendingRequestsList.begin();
4049 j != mPendingRequestsList.end(); j++) {
4050 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4051 LOGW("Error: pending live frame number %d is smaller than %d",
4052 j->frame_number, frame_number);
4053 }
4054 }
4055 camera3_capture_result_t result;
4056 memset(&result, 0, sizeof(camera3_capture_result_t));
4057 result.result = NULL;
4058 result.frame_number = frame_number;
4059 result.num_output_buffers = 1;
4060 result.partial_result = 0;
4061 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4062 m != mPendingFrameDropList.end(); m++) {
4063 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4064 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4065 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4066 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4067 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4068 frame_number, streamID);
4069 m = mPendingFrameDropList.erase(m);
4070 break;
4071 }
4072 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004073 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 result.output_buffers = buffer;
4075 LOGH("result frame_number = %d, buffer = %p",
4076 frame_number, buffer->buffer);
4077
4078 mPendingBuffersMap.removeBuf(buffer->buffer);
4079
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004080 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004081 } else {
4082 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004083 if (i->input_buffer->release_fence != -1) {
4084 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4085 close(i->input_buffer->release_fence);
4086 if (rc != OK) {
4087 LOGE("input buffer sync wait failed %d", rc);
4088 }
4089 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004090 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004091
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004092 // Put buffer into the pending request
4093 for (auto &requestedBuffer : i->buffers) {
4094 if (requestedBuffer.stream == buffer->stream) {
4095 if (requestedBuffer.buffer != nullptr) {
4096 LOGE("Error: buffer is already set");
4097 } else {
4098 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4099 sizeof(camera3_stream_buffer_t));
4100 *(requestedBuffer.buffer) = *buffer;
4101 LOGH("cache buffer %p at result frame_number %u",
4102 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004103 }
4104 }
4105 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004106
4107 if (i->input_buffer) {
4108 // For a reprocessing request, try to send out shutter callback and result metadata.
4109 handlePendingResultsWithLock(frame_number, nullptr);
4110 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004111 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004112
4113 if (mPreviewStarted == false) {
4114 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4115 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004116 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4117
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004118 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4119 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4120 mPreviewStarted = true;
4121
4122 // Set power hint for preview
4123 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4124 }
4125 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004126}
4127
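/*===========================================================================
 * FUNCTION : handlePendingResultsWithLock
 *
 * DESCRIPTION: Handles result metadata for a pending request and sends out,
 * in order, any shutter callbacks and capture results that have
 * become ready. Must be called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number the result metadata belongs to
 * @resultMetadata : result metadata for the request; may be null
 * for reprocess requests, whose result is taken from
 * the request settings
 *
 * RETURN : None
 *
 *==========================================================================*/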
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004128void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4129 const camera_metadata_t *resultMetadata)
4130{
4131 // Find the pending request for this result metadata.
4132 auto requestIter = mPendingRequestsList.begin();
4133 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4134 requestIter++;
4135 }
4136
4137 if (requestIter == mPendingRequestsList.end()) {
4138 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4139 return;
4140 }
4141
4142 // Update the result metadata
4143 requestIter->resultMetadata = resultMetadata;
4144
4145 // Check what type of request this is.
4146 bool liveRequest = false;
4147 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004148 // HDR+ request doesn't have partial results.
4149 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004150 } else if (requestIter->input_buffer != nullptr) {
4151 // Reprocessing request result is the same as settings.
4152 requestIter->resultMetadata = requestIter->settings;
4153 // Reprocessing request doesn't have partial results.
4154 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4155 } else {
4156 liveRequest = true;
4157 requestIter->partial_result_cnt++;
4158 mPendingLiveRequest--;
4159
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004160 {
4161 Mutex::Autolock l(gHdrPlusClientLock);
4162 // For a live request, send the metadata to HDR+ client.
4163 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4164 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4165 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4166 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004167 }
4168 }
4169
4170 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4171 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4172 bool readyToSend = true;
4173
4174 // Iterate through the pending requests to send out shutter callbacks and results that are
4175 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4176 // live requests that don't have result metadata yet.
4177 auto iter = mPendingRequestsList.begin();
4178 while (iter != mPendingRequestsList.end()) {
4179 // Check if current pending request is ready. If it's not ready, the following pending
4180 // requests are also not ready.
4181 if (readyToSend && iter->resultMetadata == nullptr) {
4182 readyToSend = false;
4183 }
4184
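        // A "live" request is a regular streaming request: neither an HDR+ request nor a
        // reprocess request (no input buffer).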
4185 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4186
4187 std::vector<camera3_stream_buffer_t> outputBuffers;
4188
4189 camera3_capture_result_t result = {};
4190 result.frame_number = iter->frame_number;
4191 result.result = iter->resultMetadata;
4192 result.partial_result = iter->partial_result_cnt;
4193
4194 // If this pending buffer has result metadata, we may be able to send out shutter callback
4195 // and result metadata.
4196 if (iter->resultMetadata != nullptr) {
4197 if (!readyToSend) {
4198 // If any of the previous pending request is not ready, this pending request is
4199 // also not ready to send in order to keep shutter callbacks and result metadata
4200 // in order.
4201 iter++;
4202 continue;
4203 }
4204
4205 // Invoke shutter callback if not yet.
4206 if (!iter->shutter_notified) {
4207 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4208
4209 // Find the timestamp in HDR+ result metadata
4210 camera_metadata_ro_entry_t entry;
4211 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4212 ANDROID_SENSOR_TIMESTAMP, &entry);
4213 if (res != OK) {
4214 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4215 __FUNCTION__, iter->frame_number, strerror(-res), res);
4216 } else {
4217 timestamp = entry.data.i64[0];
4218 }
4219
4220 camera3_notify_msg_t notify_msg = {};
4221 notify_msg.type = CAMERA3_MSG_SHUTTER;
4222 notify_msg.message.shutter.frame_number = iter->frame_number;
4223 notify_msg.message.shutter.timestamp = timestamp;
4224 orchestrateNotify(&notify_msg);
4225 iter->shutter_notified = true;
4226 }
4227
4228 result.input_buffer = iter->input_buffer;
4229
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004230 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4231 // If the result metadata belongs to a live request, notify errors for previous pending
4232 // live requests.
4233 mPendingLiveRequest--;
4234
4235 CameraMetadata dummyMetadata;
4236 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4237 result.result = dummyMetadata.release();
4238
4239 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004240
4241 // partial_result should be PARTIAL_RESULT_COUNT in case of
4242 // ERROR_RESULT.
4243 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4244 result.partial_result = PARTIAL_RESULT_COUNT;
4245
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004246 } else {
4247 iter++;
4248 continue;
4249 }
4250
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004251 // Prepare output buffer array
4252 for (auto bufferInfoIter = iter->buffers.begin();
4253 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4254 if (bufferInfoIter->buffer != nullptr) {
4255
4256 QCamera3Channel *channel =
4257 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4258 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4259
4260 // Check if this buffer is a dropped frame.
4261 auto frameDropIter = mPendingFrameDropList.begin();
4262 while (frameDropIter != mPendingFrameDropList.end()) {
4263 if((frameDropIter->stream_ID == streamID) &&
4264 (frameDropIter->frame_number == frameNumber)) {
4265 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4266 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4267 streamID);
4268 mPendingFrameDropList.erase(frameDropIter);
4269 break;
4270 } else {
4271 frameDropIter++;
4272 }
4273 }
4274
4275 // Check buffer error status
4276 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4277 bufferInfoIter->buffer->buffer);
4278 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4279
4280 outputBuffers.push_back(*(bufferInfoIter->buffer));
4281 free(bufferInfoIter->buffer);
4282 bufferInfoIter->buffer = NULL;
4283 }
4284 }
4285
4286 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4287 result.num_output_buffers = outputBuffers.size();
4288
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 orchestrateResult(&result);
4290
4291 // For reprocessing, result metadata is the same as settings so do not free it here to
4292 // avoid double free.
4293 if (result.result != iter->settings) {
4294 free_camera_metadata((camera_metadata_t *)result.result);
4295 }
4296 iter->resultMetadata = nullptr;
4297 iter = erasePendingRequest(iter);
4298 }
4299
4300 if (liveRequest) {
4301 for (auto &iter : mPendingRequestsList) {
4302 // Increment pipeline depth for the following pending requests.
4303 if (iter.frame_number > frameNumber) {
4304 iter.pipeline_depth++;
4305 }
4306 }
4307 }
4308
4309 unblockRequestIfNecessary();
4310}
4311
Thierry Strudel3d639192016-09-09 11:52:26 -07004312/*===========================================================================
4313 * FUNCTION : unblockRequestIfNecessary
4314 *
4315 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4316 * that mMutex is held when this function is called.
4317 *
4318 * PARAMETERS :
4319 *
4320 * RETURN :
4321 *
4322 *==========================================================================*/
4323void QCamera3HardwareInterface::unblockRequestIfNecessary()
4324{
4325 // Unblock process_capture_request
4326 pthread_cond_signal(&mRequestCond);
4327}
4328
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004329/*===========================================================================
4330 * FUNCTION : isHdrSnapshotRequest
4331 *
4332 * DESCRIPTION: Determines whether the request is for an HDR snapshot
4333 *
4334 * PARAMETERS : @request : camera3 capture request structure
4335 *
4336 * RETURN : true if the request is an HDR snapshot request, false otherwise
4337 *
4338 *==========================================================================*/
4339bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4340{
4341 if (request == NULL) {
4342 LOGE("Invalid request handle");
4343 assert(0);
4344 return false;
4345 }
4346
4347 if (!mForceHdrSnapshot) {
4348 CameraMetadata frame_settings;
4349 frame_settings = request->settings;
4350
4351 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4352 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4353 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4354 return false;
4355 }
4356 } else {
4357 return false;
4358 }
4359
4360 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4361 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4362 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4363 return false;
4364 }
4365 } else {
4366 return false;
4367 }
4368 }
4369
4370 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4371 if (request->output_buffers[i].stream->format
4372 == HAL_PIXEL_FORMAT_BLOB) {
4373 return true;
4374 }
4375 }
4376
4377 return false;
4378}
4379/*===========================================================================
4380 * FUNCTION : orchestrateRequest
4381 *
4382 * DESCRIPTION: Orchestrates a capture request from camera service
4383 *
4384 * PARAMETERS :
4385 * @request : request from framework to process
4386 *
4387 * RETURN : Error status codes
4388 *
4389 *==========================================================================*/
4390int32_t QCamera3HardwareInterface::orchestrateRequest(
4391 camera3_capture_request_t *request)
4392{
4393
4394 uint32_t originalFrameNumber = request->frame_number;
4395 uint32_t originalOutputCount = request->num_output_buffers;
4396 const camera_metadata_t *original_settings = request->settings;
4397 List<InternalRequest> internallyRequestedStreams;
4398 List<InternalRequest> emptyInternalList;
4399
4400 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4401 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4402 uint32_t internalFrameNumber;
4403 CameraMetadata modified_meta;
4404
4405
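        /* The HDR snapshot is orchestrated as a bracketed sequence of captures:
         * a metering-only settling frame plus the framework's own capture at
         * GB_HDR_HALF_STEP_EV, then settling and full internal captures at 0 EV,
         * and finally settling and full internal captures at GB_HDR_2X_STEP_EV.
         * Only the capture tied to the original framework frame number is reported
         * back; the purely internal frame numbers map to EMPTY_FRAMEWORK_FRAME_NUMBER
         * and are dropped in orchestrateResult()/orchestrateNotify(). */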
4406 /* Add Blob channel to list of internally requested streams */
4407 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4408 if (request->output_buffers[i].stream->format
4409 == HAL_PIXEL_FORMAT_BLOB) {
4410 InternalRequest streamRequested;
4411 streamRequested.meteringOnly = 1;
4412 streamRequested.need_metadata = 0;
4413 streamRequested.stream = request->output_buffers[i].stream;
4414 internallyRequestedStreams.push_back(streamRequested);
4415 }
4416 }
4417 request->num_output_buffers = 0;
4418 auto itr = internallyRequestedStreams.begin();
4419
4420 /* Modify setting to set compensation */
4421 modified_meta = request->settings;
4422 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4423 uint8_t aeLock = 1;
4424 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4425 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4426 camera_metadata_t *modified_settings = modified_meta.release();
4427 request->settings = modified_settings;
4428
4429 /* Capture Settling & -2x frame */
4430 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4431 request->frame_number = internalFrameNumber;
4432 processCaptureRequest(request, internallyRequestedStreams);
4433
4434 request->num_output_buffers = originalOutputCount;
4435 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4436 request->frame_number = internalFrameNumber;
4437 processCaptureRequest(request, emptyInternalList);
4438 request->num_output_buffers = 0;
4439
4440 modified_meta = modified_settings;
4441 expCompensation = 0;
4442 aeLock = 1;
4443 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4444 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4445 modified_settings = modified_meta.release();
4446 request->settings = modified_settings;
4447
4448 /* Capture Settling & 0X frame */
4449
4450 itr = internallyRequestedStreams.begin();
4451 if (itr == internallyRequestedStreams.end()) {
4452 LOGE("Error Internally Requested Stream list is empty");
4453 assert(0);
4454 } else {
4455 itr->need_metadata = 0;
4456 itr->meteringOnly = 1;
4457 }
4458
4459 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4460 request->frame_number = internalFrameNumber;
4461 processCaptureRequest(request, internallyRequestedStreams);
4462
4463 itr = internallyRequestedStreams.begin();
4464 if (itr == internallyRequestedStreams.end()) {
4465 ALOGE("Error Internally Requested Stream list is empty");
4466 assert(0);
4467 } else {
4468 itr->need_metadata = 1;
4469 itr->meteringOnly = 0;
4470 }
4471
4472 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4473 request->frame_number = internalFrameNumber;
4474 processCaptureRequest(request, internallyRequestedStreams);
4475
4476 /* Capture 2X frame*/
4477 modified_meta = modified_settings;
4478 expCompensation = GB_HDR_2X_STEP_EV;
4479 aeLock = 1;
4480 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4481 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4482 modified_settings = modified_meta.release();
4483 request->settings = modified_settings;
4484
4485 itr = internallyRequestedStreams.begin();
4486 if (itr == internallyRequestedStreams.end()) {
4487 ALOGE("Error Internally Requested Stream list is empty");
4488 assert(0);
4489 } else {
4490 itr->need_metadata = 0;
4491 itr->meteringOnly = 1;
4492 }
4493 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4494 request->frame_number = internalFrameNumber;
4495 processCaptureRequest(request, internallyRequestedStreams);
4496
4497 itr = internallyRequestedStreams.begin();
4498 if (itr == internallyRequestedStreams.end()) {
4499 ALOGE("Error Internally Requested Stream list is empty");
4500 assert(0);
4501 } else {
4502 itr->need_metadata = 1;
4503 itr->meteringOnly = 0;
4504 }
4505
4506 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4507 request->frame_number = internalFrameNumber;
4508 processCaptureRequest(request, internallyRequestedStreams);
4509
4510
4511 /* Capture 2X on original streaming config*/
4512 internallyRequestedStreams.clear();
4513
4514 /* Restore original settings pointer */
4515 request->settings = original_settings;
4516 } else {
4517 uint32_t internalFrameNumber;
4518 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4519 request->frame_number = internalFrameNumber;
4520 return processCaptureRequest(request, internallyRequestedStreams);
4521 }
4522
4523 return NO_ERROR;
4524}
4525
4526/*===========================================================================
4527 * FUNCTION : orchestrateResult
4528 *
4529 * DESCRIPTION: Orchestrates a capture result to camera service
4530 *
4531 * PARAMETERS :
4532 * @result : capture result to forward to the camera service
4533 *
4534 * RETURN :
4535 *
4536 *==========================================================================*/
4537void QCamera3HardwareInterface::orchestrateResult(
4538 camera3_capture_result_t *result)
4539{
4540 uint32_t frameworkFrameNumber;
4541 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4542 frameworkFrameNumber);
4543 if (rc != NO_ERROR) {
4544 LOGE("Cannot find translated frameworkFrameNumber");
4545 assert(0);
4546 } else {
4547 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004548 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004549 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004550 if (result->result != NULL) {
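                // Rewrite ANDROID_SYNC_FRAME_NUMBER in the result metadata (when present) so
                // it refers to the framework frame number rather than the internal one.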
Binhao Lin299ffc92017-04-27 11:22:47 -07004551 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4552 camera_metadata_entry_t entry;
4553 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4554 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004555 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004556 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4557 if (ret != OK)
4558 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004559 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004560 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004561 result->frame_number = frameworkFrameNumber;
4562 mCallbackOps->process_capture_result(mCallbackOps, result);
4563 }
4564 }
4565}
4566
4567/*===========================================================================
4568 * FUNCTION : orchestrateNotify
4569 *
4570 * DESCRIPTION: Orchestrates a notify message to the camera service
4571 *
4572 * PARAMETERS :
4573 * @notify_msg : notify message to forward to the camera service
4574 *
4575 * RETURN :
4576 *
4577 *==========================================================================*/
4578void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4579{
4580 uint32_t frameworkFrameNumber;
4581 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004582 int32_t rc = NO_ERROR;
4583
4584 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004585 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004586
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004587 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004588 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4589 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4590 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004591 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004592 LOGE("Cannot find translated frameworkFrameNumber");
4593 assert(0);
4594 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004595 }
4596 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004597
4598 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4599 LOGD("Internal Request drop the notifyCb");
4600 } else {
4601 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4602 mCallbackOps->notify(mCallbackOps, notify_msg);
4603 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004604}
4605
4606/*===========================================================================
4607 * FUNCTION : FrameNumberRegistry
4608 *
4609 * DESCRIPTION: Constructor
4610 *
4611 * PARAMETERS :
4612 *
4613 * RETURN :
4614 *
4615 *==========================================================================*/
4616FrameNumberRegistry::FrameNumberRegistry()
4617{
4618 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4619}
4620
4621/*===========================================================================
4622 * FUNCTION : ~FrameNumberRegistry
4623 *
4624 * DESCRIPTION: Destructor
4625 *
4626 * PARAMETERS :
4627 *
4628 * RETURN :
4629 *
4630 *==========================================================================*/
4631FrameNumberRegistry::~FrameNumberRegistry()
4632{
4633}
4634
4635/*===========================================================================
4636 * FUNCTION : PurgeOldEntriesLocked
4637 *
4638 * DESCRIPTION: Maintenance function that evicts registry entries outside the LRU window
4639 *
4640 * PARAMETERS :
4641 *
4642 * RETURN : NONE
4643 *
4644 *==========================================================================*/
4645void FrameNumberRegistry::purgeOldEntriesLocked()
4646{
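    // Entries are keyed by ever-increasing internal frame numbers, so begin() points at the
    // oldest entry; erase entries until the remaining ones fall inside the
    // FRAME_REGISTER_LRU_SIZE window.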
4647 while (_register.begin() != _register.end()) {
4648 auto itr = _register.begin();
4649 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4650 _register.erase(itr);
4651 } else {
4652 return;
4653 }
4654 }
4655}
4656
4657/*===========================================================================
4658 * FUNCTION : allocStoreInternalFrameNumber
4659 *
4660 * DESCRIPTION: Records a framework frame number and associates a newly
4661 * generated internal frame number with it
4662 *
4663 * PARAMETERS :
4664 * @frameworkFrameNumber: Frame number provided by the framework
4665 * @internalFrameNumber : Output parameter that receives the newly
4666 * generated internal frame number
4667 *
4668 * RETURN : Error code
4669 *
4670 *==========================================================================*/
4671int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4672 uint32_t &internalFrameNumber)
4673{
4674 Mutex::Autolock lock(mRegistryLock);
4675 internalFrameNumber = _nextFreeInternalNumber++;
4676 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4677 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4678 purgeOldEntriesLocked();
4679 return NO_ERROR;
4680}
4681
4682/*===========================================================================
4683 * FUNCTION : generateStoreInternalFrameNumber
4684 *
4685 * DESCRIPTION: Generates and stores a new internal frame number that is not
4686 * associated with any framework request
4687 *
4688 * PARAMETERS :
4689 * @internalFrameNumber: Output parameter that receives the newly generated
4690 * internal frame number
4691 *
4692 * RETURN : Error code
4693 *
4694 *==========================================================================*/
4695int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4696{
4697 Mutex::Autolock lock(mRegistryLock);
4698 internalFrameNumber = _nextFreeInternalNumber++;
4699 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4700 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4701 purgeOldEntriesLocked();
4702 return NO_ERROR;
4703}
4704
4705/*===========================================================================
4706 * FUNCTION : getFrameworkFrameNumber
4707 *
4708 * DESCRIPTION: Method to query the framework framenumber given an internal #
4709 *
4710 * PARAMETERS :
4711 * @internalFrameNumber: Internal frame number to look up
4712 * @frameworkFrameNumber: Output parameter holding the corresponding framework frame number
4713 *
4714 * RETURN : Error code
4715 *
4716 *==========================================================================*/
4717int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4718 uint32_t &frameworkFrameNumber)
4719{
4720 Mutex::Autolock lock(mRegistryLock);
4721 auto itr = _register.find(internalFrameNumber);
4722 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004723 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004724 return -ENOENT;
4725 }
4726
4727 frameworkFrameNumber = itr->second;
4728 purgeOldEntriesLocked();
4729 return NO_ERROR;
4730}
Thierry Strudel3d639192016-09-09 11:52:26 -07004731
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004732status_t QCamera3HardwareInterface::fillPbStreamConfig(
4733 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4734 QCamera3Channel *channel, uint32_t streamIndex) {
4735 if (config == nullptr) {
4736 LOGE("%s: config is null", __FUNCTION__);
4737 return BAD_VALUE;
4738 }
4739
4740 if (channel == nullptr) {
4741 LOGE("%s: channel is null", __FUNCTION__);
4742 return BAD_VALUE;
4743 }
4744
4745 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4746 if (stream == nullptr) {
4747 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4748 return NAME_NOT_FOUND;
4749 }
4750
4751 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4752 if (streamInfo == nullptr) {
4753 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4754 return NAME_NOT_FOUND;
4755 }
4756
4757 config->id = pbStreamId;
4758 config->image.width = streamInfo->dim.width;
4759 config->image.height = streamInfo->dim.height;
4760 config->image.padding = 0;
4761 config->image.format = pbStreamFormat;
4762
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004763 uint32_t totalPlaneSize = 0;
4764
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004765 // Fill plane information.
4766 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4767 pbcamera::PlaneConfiguration plane;
4768 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4769 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4770 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004771
4772 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004773 }
4774
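    // Whatever the backend's frame length allocates beyond the sum of the per-plane
    // (stride * scanline) sizes is reported as padding.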
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004775 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004776 return OK;
4777}
4778
Thierry Strudel3d639192016-09-09 11:52:26 -07004779/*===========================================================================
4780 * FUNCTION : processCaptureRequest
4781 *
4782 * DESCRIPTION: process a capture request from camera service
4783 *
4784 * PARAMETERS :
4785 * @request : request from framework to process
4786 *
4787 * RETURN :
4788 *
4789 *==========================================================================*/
4790int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004791 camera3_capture_request_t *request,
4792 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004793{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004794 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004795 int rc = NO_ERROR;
4796 int32_t request_id;
4797 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004798 bool isVidBufRequested = false;
4799 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004800 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004801
4802 pthread_mutex_lock(&mMutex);
4803
4804 // Validate current state
4805 switch (mState) {
4806 case CONFIGURED:
4807 case STARTED:
4808 /* valid state */
4809 break;
4810
4811 case ERROR:
4812 pthread_mutex_unlock(&mMutex);
4813 handleCameraDeviceError();
4814 return -ENODEV;
4815
4816 default:
4817 LOGE("Invalid state %d", mState);
4818 pthread_mutex_unlock(&mMutex);
4819 return -ENODEV;
4820 }
4821
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004822 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004823 if (rc != NO_ERROR) {
4824 LOGE("incoming request is not valid");
4825 pthread_mutex_unlock(&mMutex);
4826 return rc;
4827 }
4828
4829 meta = request->settings;
4830
4831 // For first capture request, send capture intent, and
4832 // stream on all streams
4833 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004834 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 // send an unconfigure to the backend so that the isp
4836 // resources are deallocated
4837 if (!mFirstConfiguration) {
4838 cam_stream_size_info_t stream_config_info;
4839 int32_t hal_version = CAM_HAL_V3;
4840 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4841 stream_config_info.buffer_info.min_buffers =
4842 MIN_INFLIGHT_REQUESTS;
4843 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004844 m_bIs4KVideo ? 0 :
4845 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 clear_metadata_buffer(mParameters);
4847 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4848 CAM_INTF_PARM_HAL_VERSION, hal_version);
4849 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4850 CAM_INTF_META_STREAM_INFO, stream_config_info);
4851 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4852 mParameters);
4853 if (rc < 0) {
4854 LOGE("set_parms for unconfigure failed");
4855 pthread_mutex_unlock(&mMutex);
4856 return rc;
4857 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004858
Thierry Strudel3d639192016-09-09 11:52:26 -07004859 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004860 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004861 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004862 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004864 property_get("persist.camera.is_type", is_type_value, "4");
4865 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4866 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4867 property_get("persist.camera.is_type_preview", is_type_value, "4");
4868 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4869 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004870
4871 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4872 int32_t hal_version = CAM_HAL_V3;
4873 uint8_t captureIntent =
4874 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4875 mCaptureIntent = captureIntent;
4876 clear_metadata_buffer(mParameters);
4877 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4878 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4879 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004880 if (mFirstConfiguration) {
4881 // configure instant AEC
4882 // Instant AEC is a session based parameter and it is needed only
4883 // once per complete session after open camera.
4884 // i.e. This is set only once for the first capture request, after open camera.
4885 setInstantAEC(meta);
4886 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004887 uint8_t fwkVideoStabMode=0;
4888 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4889 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4890 }
4891
Xue Tuecac74e2017-04-17 13:58:15 -07004892 // If EIS setprop is enabled then only turn it on for video/preview
4893 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004894 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004895 int32_t vsMode;
4896 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4897 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4898 rc = BAD_VALUE;
4899 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004900 LOGD("setEis %d", setEis);
4901 bool eis3Supported = false;
4902 size_t count = IS_TYPE_MAX;
4903 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4904 for (size_t i = 0; i < count; i++) {
4905 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4906 eis3Supported = true;
4907 break;
4908 }
4909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004910
4911 //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004912 //it could be either IS_TYPE_EIS_2_0 or IS_TYPE_EIS_3_0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004913 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4914 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4916 is_type = isTypePreview;
4917 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4918 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4919 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004920 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004921 } else {
4922 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004923 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004924 } else {
4925 is_type = IS_TYPE_NONE;
4926 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004928 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4930 }
4931 }
4932
4933 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4934 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4935
Thierry Strudel54dc9782017-02-15 12:12:10 -08004936 //Disable tintless only if the property is set to 0
4937 memset(prop, 0, sizeof(prop));
4938 property_get("persist.camera.tintless.enable", prop, "1");
4939 int32_t tintless_value = atoi(prop);
4940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004943
Thierry Strudel3d639192016-09-09 11:52:26 -07004944 //Disable CDS for HFR mode or if DIS/EIS is on.
4945 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4946 //after every configure_stream
4947 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4948 (m_bIsVideo)) {
4949 int32_t cds = CAM_CDS_MODE_OFF;
4950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4951 CAM_INTF_PARM_CDS_MODE, cds))
4952 LOGE("Failed to disable CDS for HFR mode");
4953
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955
4956 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4957 uint8_t* use_av_timer = NULL;
4958
4959 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004960 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004961 use_av_timer = &m_debug_avtimer;
4962 }
4963 else{
4964 use_av_timer =
4965 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004966 if (use_av_timer) {
4967 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4968 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 }
4970
4971 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4972 rc = BAD_VALUE;
4973 }
4974 }
4975
Thierry Strudel3d639192016-09-09 11:52:26 -07004976 setMobicat();
4977
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004978 uint8_t nrMode = 0;
4979 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4980 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4981 }
4982
Thierry Strudel3d639192016-09-09 11:52:26 -07004983 /* Set fps and hfr mode while sending meta stream info so that sensor
4984 * can configure appropriate streaming mode */
4985 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004986 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4987 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4989 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004990 if (rc == NO_ERROR) {
4991 int32_t max_fps =
4992 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004993 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4995 }
4996 /* For HFR, more buffers are dequeued upfront to improve the performance */
4997 if (mBatchSize) {
4998 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4999 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5000 }
5001 }
5002 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005003 LOGE("setHalFpsRange failed");
5004 }
5005 }
5006 if (meta.exists(ANDROID_CONTROL_MODE)) {
5007 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5008 rc = extractSceneMode(meta, metaMode, mParameters);
5009 if (rc != NO_ERROR) {
5010 LOGE("extractSceneMode failed");
5011 }
5012 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005013 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005014
Thierry Strudel04e026f2016-10-10 11:27:36 -07005015 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5016 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5017 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5018 rc = setVideoHdrMode(mParameters, vhdr);
5019 if (rc != NO_ERROR) {
5020 LOGE("setVideoHDR is failed");
5021 }
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005024 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005025 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005026 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005027 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5029 sensorModeFullFov)) {
5030 rc = BAD_VALUE;
5031 }
5032 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 //TODO: validate the arguments, HSV scenemode should have only the
5034 //advertised fps ranges
5035
5036 /*set the capture intent, hal version, tintless, stream info,
5037 *and DIS enable parameters to the backend*/
5038 LOGD("set_parms META_STREAM_INFO " );
5039 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005040 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5041 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005042 mStreamConfigInfo.type[i],
5043 mStreamConfigInfo.stream_sizes[i].width,
5044 mStreamConfigInfo.stream_sizes[i].height,
5045 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005046 mStreamConfigInfo.format[i],
5047 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005048 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049
Thierry Strudel3d639192016-09-09 11:52:26 -07005050 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5051 mParameters);
5052 if (rc < 0) {
5053 LOGE("set_parms failed for hal version, stream info");
5054 }
5055
Chien-Yu Chenee335912017-02-09 17:53:20 -08005056 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5057 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (rc != NO_ERROR) {
5059 LOGE("Failed to get sensor output size");
5060 pthread_mutex_unlock(&mMutex);
5061 goto error_exit;
5062 }
5063
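    // Refresh the crop region mapper with the full active-array size and the active-array
    // size of the selected sensor mode, so framework crop regions can be translated to
    // sensor-mode coordinates.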
5064 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5065 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005066 mSensorModeInfo.active_array_size.width,
5067 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005068
5069 /* Set batchmode before initializing channel. Since registerBuffer
5070 * internally initializes some of the channels, better set batchmode
5071 * even before first register buffer */
5072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5073 it != mStreamInfo.end(); it++) {
5074 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5075 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5076 && mBatchSize) {
5077 rc = channel->setBatchSize(mBatchSize);
5078 //Disable per frame map unmap for HFR/batchmode case
5079 rc |= channel->setPerFrameMapUnmap(false);
5080 if (NO_ERROR != rc) {
5081 LOGE("Channel init failed %d", rc);
5082 pthread_mutex_unlock(&mMutex);
5083 goto error_exit;
5084 }
5085 }
5086 }
5087
5088 //First initialize all streams
5089 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5090 it != mStreamInfo.end(); it++) {
5091 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005092
5093 /* Initial value of NR mode is needed before stream on */
5094 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5096 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005097 setEis) {
5098 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5099 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5100 is_type = mStreamConfigInfo.is_type[i];
5101 break;
5102 }
5103 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005104 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005105 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005106 rc = channel->initialize(IS_TYPE_NONE);
5107 }
5108 if (NO_ERROR != rc) {
5109 LOGE("Channel initialization failed %d", rc);
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
5114
5115 if (mRawDumpChannel) {
5116 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5117 if (rc != NO_ERROR) {
5118 LOGE("Error: Raw Dump Channel init failed");
5119 pthread_mutex_unlock(&mMutex);
5120 goto error_exit;
5121 }
5122 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005123 if (mHdrPlusRawSrcChannel) {
5124 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5125 if (rc != NO_ERROR) {
5126 LOGE("Error: HDR+ RAW Source Channel init failed");
5127 pthread_mutex_unlock(&mMutex);
5128 goto error_exit;
5129 }
5130 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005131 if (mSupportChannel) {
5132 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5133 if (rc < 0) {
5134 LOGE("Support channel initialization failed");
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
5139 if (mAnalysisChannel) {
5140 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5141 if (rc < 0) {
5142 LOGE("Analysis channel initialization failed");
5143 pthread_mutex_unlock(&mMutex);
5144 goto error_exit;
5145 }
5146 }
5147 if (mDummyBatchChannel) {
5148 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5149 if (rc < 0) {
5150 LOGE("mDummyBatchChannel setBatchSize failed");
5151 pthread_mutex_unlock(&mMutex);
5152 goto error_exit;
5153 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005154 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005155 if (rc < 0) {
5156 LOGE("mDummyBatchChannel initialization failed");
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161
5162 // Set bundle info
5163 rc = setBundleInfo();
5164 if (rc < 0) {
5165 LOGE("setBundleInfo failed %d", rc);
5166 pthread_mutex_unlock(&mMutex);
5167 goto error_exit;
5168 }
5169
5170 //update settings from app here
5171 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5172 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5173 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5174 }
5175 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5176 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5177 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5178 }
5179 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5180 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5181 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5182
5183 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5184 (mLinkedCameraId != mCameraId) ) {
5185 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5186 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005187 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005188 goto error_exit;
5189 }
5190 }
5191
5192 // add bundle related cameras
5193 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5194 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005195 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5196 &m_pDualCamCmdPtr->bundle_info;
5197 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005198 if (mIsDeviceLinked)
5199 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5200 else
5201 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5202
5203 pthread_mutex_lock(&gCamLock);
5204
5205 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5206 LOGE("Dualcam: Invalid Session Id ");
5207 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005208 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 goto error_exit;
5210 }
5211
5212 if (mIsMainCamera == 1) {
5213 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5214 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005215 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005216 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 // related session id should be session id of linked session
5218 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5219 } else {
5220 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5221 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005222 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005223 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005224 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5225 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005226 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 pthread_mutex_unlock(&gCamLock);
5228
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005229 rc = mCameraHandle->ops->set_dual_cam_cmd(
5230 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005231 if (rc < 0) {
5232 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005233 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005234 goto error_exit;
5235 }
5236 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005237 goto no_error;
5238error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005239 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005240 return rc;
5241no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005242 mWokenUpByDaemon = false;
5243 mPendingLiveRequest = 0;
5244 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005245 }
5246
5247 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005248 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005249
5250 if (mFlushPerf) {
5251 //we cannot accept any requests during flush
5252 LOGE("process_capture_request cannot proceed during flush");
5253 pthread_mutex_unlock(&mMutex);
5254 return NO_ERROR; //should return an error
5255 }
5256
5257 if (meta.exists(ANDROID_REQUEST_ID)) {
5258 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5259 mCurrentRequestId = request_id;
5260 LOGD("Received request with id: %d", request_id);
5261 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5262 LOGE("Unable to find request id field, \
5263 & no previous id available");
5264 pthread_mutex_unlock(&mMutex);
5265 return NAME_NOT_FOUND;
5266 } else {
5267 LOGD("Re-using old request id");
5268 request_id = mCurrentRequestId;
5269 }
5270
5271 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5272 request->num_output_buffers,
5273 request->input_buffer,
5274 frameNumber);
5275 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005276 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005277 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005278 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 uint32_t snapshotStreamId = 0;
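    // Walk the requested output buffers: wait on acquire fences, note whether a JPEG (blob)
    // capture was requested, and collect the backend stream IDs that must produce a buffer
    // for this frame. Depth blobs are only flagged here and are not added to streamsArray.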
5280 for (size_t i = 0; i < request->num_output_buffers; i++) {
5281 const camera3_stream_buffer_t& output = request->output_buffers[i];
5282 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5283
Emilian Peev7650c122017-01-19 08:24:33 -08005284 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5285 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005286 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 blob_request = 1;
5288 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5289 }
5290
5291 if (output.acquire_fence != -1) {
5292 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5293 close(output.acquire_fence);
5294 if (rc != OK) {
5295 LOGE("sync wait failed %d", rc);
5296 pthread_mutex_unlock(&mMutex);
5297 return rc;
5298 }
5299 }
5300
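        // Depth (blob + HAL_DATASPACE_DEPTH) buffers are handled separately
        // (see mDepthChannel->mapBuffer() below) and only require PDAF data to
        // be enabled, so they are not added to streamsArray.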
Emilian Peev0f3c3162017-03-15 12:57:46 +00005301 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5302 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005303 depthRequestPresent = true;
5304 continue;
5305 }
5306
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005307 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005308 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005309
5310 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5311 isVidBufRequested = true;
5312 }
5313 }
5314
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005315    //FIXME: Add checks in validateCaptureRequest to ensure there are no duplicate streams
5316 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5317 itr++) {
5318 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5319 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5320 channel->getStreamID(channel->getStreamTypeMask());
5321
5322 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5323 isVidBufRequested = true;
5324 }
5325 }
5326
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005328 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005329 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 }
5331 if (blob_request && mRawDumpChannel) {
5332 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005333 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005334 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 }
5337
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005338 {
5339 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5340 // Request a RAW buffer if
5341 // 1. mHdrPlusRawSrcChannel is valid.
5342 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5343 // 3. There is no pending HDR+ request.
5344 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5345 mHdrPlusPendingRequests.size() == 0) {
5346 streamsArray.stream_request[streamsArray.num_streams].streamID =
5347 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5348 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5349 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005350 }
5351
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005352 //extract capture intent
5353 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5354 mCaptureIntent =
5355 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5356 }
5357
5358 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5359 mCacMode =
5360 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5361 }
5362
5363 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005364 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005365
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005366 {
5367 Mutex::Autolock l(gHdrPlusClientLock);
5368 // If this request has a still capture intent, try to submit an HDR+ request.
5369 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5370 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5371 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5372 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005373 }
5374
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005375 if (hdrPlusRequest) {
5376 // For a HDR+ request, just set the frame parameters.
5377 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5378 if (rc < 0) {
5379 LOGE("fail to set frame parameters");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
5383 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005384 /* Parse the settings:
5385 * - For every request in NORMAL MODE
5386 * - For every request in HFR mode during preview only case
5387 * - For first request of every batch in HFR mode during video
5388 * recording. In batchmode the same settings except frame number is
5389 * repeated in each request of the batch.
5390 */
5391 if (!mBatchSize ||
5392 (mBatchSize && !isVidBufRequested) ||
5393 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005394 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005395 if (rc < 0) {
5396 LOGE("fail to set frame parameters");
5397 pthread_mutex_unlock(&mMutex);
5398 return rc;
5399 }
5400 }
        5401        /* For batchMode HFR, setFrameParameters is not called for every
        5402         * request; only the frame number of the latest request is parsed.
        5403         * Keep track of the first and last frame numbers in a batch so that
        5404         * metadata for all frame numbers of the batch can be duplicated in
        5405         * handleBatchMetadata */
5406 if (mBatchSize) {
5407 if (!mToBeQueuedVidBufs) {
5408 //start of the batch
5409 mFirstFrameNumberInBatch = request->frame_number;
5410 }
5411 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5412 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5413 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005414 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 return BAD_VALUE;
5416 }
5417 }
5418 if (mNeedSensorRestart) {
5419 /* Unlock the mutex as restartSensor waits on the channels to be
5420 * stopped, which in turn calls stream callback functions -
5421 * handleBufferWithLock and handleMetadataWithLock */
5422 pthread_mutex_unlock(&mMutex);
5423 rc = dynamicUpdateMetaStreamInfo();
5424 if (rc != NO_ERROR) {
5425 LOGE("Restarting the sensor failed");
5426 return BAD_VALUE;
5427 }
5428 mNeedSensorRestart = false;
5429 pthread_mutex_lock(&mMutex);
5430 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005431 if(mResetInstantAEC) {
5432 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5433 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5434 mResetInstantAEC = false;
5435 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005436 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005437 if (request->input_buffer->acquire_fence != -1) {
5438 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5439 close(request->input_buffer->acquire_fence);
5440 if (rc != OK) {
5441 LOGE("input buffer sync wait failed %d", rc);
5442 pthread_mutex_unlock(&mMutex);
5443 return rc;
5444 }
5445 }
5446 }
5447
5448 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5449 mLastCustIntentFrmNum = frameNumber;
5450 }
5451 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005453 pendingRequestIterator latestRequest;
5454 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005455 pendingRequest.num_buffers = depthRequestPresent ?
5456 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005457 pendingRequest.request_id = request_id;
5458 pendingRequest.blob_request = blob_request;
5459 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005460 if (request->input_buffer) {
5461 pendingRequest.input_buffer =
5462 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5463 *(pendingRequest.input_buffer) = *(request->input_buffer);
5464 pInputBuffer = pendingRequest.input_buffer;
5465 } else {
5466 pendingRequest.input_buffer = NULL;
5467 pInputBuffer = NULL;
5468 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005469 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005470
5471 pendingRequest.pipeline_depth = 0;
5472 pendingRequest.partial_result_cnt = 0;
5473 extractJpegMetadata(mCurJpegMeta, request);
5474 pendingRequest.jpegMetadata = mCurJpegMeta;
5475 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5476 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005477 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005478 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5479 mHybridAeEnable =
5480 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5481 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005482
5483 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5484 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005485 /* DevCamDebug metadata processCaptureRequest */
5486 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5487 mDevCamDebugMetaEnable =
5488 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5489 }
5490 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5491 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005492
5493 //extract CAC info
5494 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5495 mCacMode =
5496 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5497 }
5498 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005499 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005500
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005501 // extract enableZsl info
5502 if (gExposeEnableZslKey) {
5503 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5504 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5505 mZslEnabled = pendingRequest.enableZsl;
5506 } else {
5507 pendingRequest.enableZsl = mZslEnabled;
5508 }
5509 }
5510
Thierry Strudel3d639192016-09-09 11:52:26 -07005511 PendingBuffersInRequest bufsForCurRequest;
5512 bufsForCurRequest.frame_number = frameNumber;
5513 // Mark current timestamp for the new request
5514 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005515 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005516
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005517 if (hdrPlusRequest) {
5518 // Save settings for this request.
5519 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5520 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5521
5522 // Add to pending HDR+ request queue.
5523 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5524 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5525
5526 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5527 }
5528
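    // Record per-buffer bookkeeping for this request; depth blob buffers are
    // tracked separately and skipped here.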
Thierry Strudel3d639192016-09-09 11:52:26 -07005529 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005530 if ((request->output_buffers[i].stream->data_space ==
5531 HAL_DATASPACE_DEPTH) &&
5532 (HAL_PIXEL_FORMAT_BLOB ==
5533 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005534 continue;
5535 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005536 RequestedBufferInfo requestedBuf;
5537 memset(&requestedBuf, 0, sizeof(requestedBuf));
5538 requestedBuf.stream = request->output_buffers[i].stream;
5539 requestedBuf.buffer = NULL;
5540 pendingRequest.buffers.push_back(requestedBuf);
5541
5542 // Add to buffer handle the pending buffers list
5543 PendingBufferInfo bufferInfo;
5544 bufferInfo.buffer = request->output_buffers[i].buffer;
5545 bufferInfo.stream = request->output_buffers[i].stream;
5546 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5547 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5548 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5549 frameNumber, bufferInfo.buffer,
5550 channel->getStreamTypeMask(), bufferInfo.stream->format);
5551 }
5552 // Add this request packet into mPendingBuffersMap
5553 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5554 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5555 mPendingBuffersMap.get_num_overall_buffers());
5556
5557 latestRequest = mPendingRequestsList.insert(
5558 mPendingRequestsList.end(), pendingRequest);
5559 if(mFlush) {
5560 LOGI("mFlush is true");
5561 pthread_mutex_unlock(&mMutex);
5562 return NO_ERROR;
5563 }
5564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5566 // channel.
5567 if (!hdrPlusRequest) {
5568 int indexUsed;
5569 // Notify metadata channel we receive a request
5570 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005571
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005572 if(request->input_buffer != NULL){
5573 LOGD("Input request, frame_number %d", frameNumber);
5574 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5575 if (NO_ERROR != rc) {
5576 LOGE("fail to set reproc parameters");
5577 pthread_mutex_unlock(&mMutex);
5578 return rc;
5579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005580 }
5581
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005582 // Call request on other streams
5583 uint32_t streams_need_metadata = 0;
5584 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5585 for (size_t i = 0; i < request->num_output_buffers; i++) {
5586 const camera3_stream_buffer_t& output = request->output_buffers[i];
5587 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5588
5589 if (channel == NULL) {
5590 LOGW("invalid channel pointer for stream");
5591 continue;
5592 }
5593
5594 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5595 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5596 output.buffer, request->input_buffer, frameNumber);
5597 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005598 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005599 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5600 if (rc < 0) {
5601 LOGE("Fail to request on picture channel");
5602 pthread_mutex_unlock(&mMutex);
5603 return rc;
5604 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005605 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005606 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5607 assert(NULL != mDepthChannel);
5608 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005609
Emilian Peev7650c122017-01-19 08:24:33 -08005610 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5611 if (rc < 0) {
5612 LOGE("Fail to map on depth buffer");
5613 pthread_mutex_unlock(&mMutex);
5614 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615 }
Emilian Peev7650c122017-01-19 08:24:33 -08005616 } else {
5617 LOGD("snapshot request with buffer %p, frame_number %d",
5618 output.buffer, frameNumber);
5619 if (!request->settings) {
5620 rc = channel->request(output.buffer, frameNumber,
5621 NULL, mPrevParameters, indexUsed);
5622 } else {
5623 rc = channel->request(output.buffer, frameNumber,
5624 NULL, mParameters, indexUsed);
5625 }
5626 if (rc < 0) {
5627 LOGE("Fail to request on picture channel");
5628 pthread_mutex_unlock(&mMutex);
5629 return rc;
5630 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005631
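                    // Tell the backend which buffer index to use for this stream:
                    // in constrained high-speed mode a free-running index
                    // (CAM_FREERUN_IDX) is used instead of the specific index.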
Emilian Peev7650c122017-01-19 08:24:33 -08005632 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5633 uint32_t j = 0;
5634 for (j = 0; j < streamsArray.num_streams; j++) {
5635 if (streamsArray.stream_request[j].streamID == streamId) {
5636 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5637 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5638 else
5639 streamsArray.stream_request[j].buf_index = indexUsed;
5640 break;
5641 }
5642 }
5643 if (j == streamsArray.num_streams) {
5644 LOGE("Did not find matching stream to update index");
5645 assert(0);
5646 }
5647
5648 pendingBufferIter->need_metadata = true;
5649 streams_need_metadata++;
5650 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005651 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005652 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5653 bool needMetadata = false;
5654 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5655 rc = yuvChannel->request(output.buffer, frameNumber,
5656 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5657 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005658 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005659 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 pthread_mutex_unlock(&mMutex);
5661 return rc;
5662 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005663
5664 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5665 uint32_t j = 0;
5666 for (j = 0; j < streamsArray.num_streams; j++) {
5667 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005668 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5669 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5670 else
5671 streamsArray.stream_request[j].buf_index = indexUsed;
5672 break;
5673 }
5674 }
5675 if (j == streamsArray.num_streams) {
5676 LOGE("Did not find matching stream to update index");
5677 assert(0);
5678 }
5679
5680 pendingBufferIter->need_metadata = needMetadata;
5681 if (needMetadata)
5682 streams_need_metadata += 1;
5683 LOGD("calling YUV channel request, need_metadata is %d",
5684 needMetadata);
5685 } else {
5686 LOGD("request with buffer %p, frame_number %d",
5687 output.buffer, frameNumber);
5688
5689 rc = channel->request(output.buffer, frameNumber, indexUsed);
5690
5691 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5692 uint32_t j = 0;
5693 for (j = 0; j < streamsArray.num_streams; j++) {
5694 if (streamsArray.stream_request[j].streamID == streamId) {
5695 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5696 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5697 else
5698 streamsArray.stream_request[j].buf_index = indexUsed;
5699 break;
5700 }
5701 }
5702 if (j == streamsArray.num_streams) {
5703 LOGE("Did not find matching stream to update index");
5704 assert(0);
5705 }
5706
5707 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5708 && mBatchSize) {
5709 mToBeQueuedVidBufs++;
5710 if (mToBeQueuedVidBufs == mBatchSize) {
5711 channel->queueBatchBuf();
5712 }
5713 }
5714 if (rc < 0) {
5715 LOGE("request failed");
5716 pthread_mutex_unlock(&mMutex);
5717 return rc;
5718 }
5719 }
5720 pendingBufferIter++;
5721 }
5722
5723 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5724 itr++) {
5725 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5726
5727 if (channel == NULL) {
5728 LOGE("invalid channel pointer for stream");
5729 assert(0);
5730 return BAD_VALUE;
5731 }
5732
5733 InternalRequest requestedStream;
5734 requestedStream = (*itr);
5735
5736
5737 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5738 LOGD("snapshot request internally input buffer %p, frame_number %d",
5739 request->input_buffer, frameNumber);
5740 if(request->input_buffer != NULL){
5741 rc = channel->request(NULL, frameNumber,
5742 pInputBuffer, &mReprocMeta, indexUsed, true,
5743 requestedStream.meteringOnly);
5744 if (rc < 0) {
5745 LOGE("Fail to request on picture channel");
5746 pthread_mutex_unlock(&mMutex);
5747 return rc;
5748 }
5749 } else {
5750 LOGD("snapshot request with frame_number %d", frameNumber);
5751 if (!request->settings) {
5752 rc = channel->request(NULL, frameNumber,
5753 NULL, mPrevParameters, indexUsed, true,
5754 requestedStream.meteringOnly);
5755 } else {
5756 rc = channel->request(NULL, frameNumber,
5757 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5758 }
5759 if (rc < 0) {
5760 LOGE("Fail to request on picture channel");
5761 pthread_mutex_unlock(&mMutex);
5762 return rc;
5763 }
5764
5765 if ((*itr).meteringOnly != 1) {
5766 requestedStream.need_metadata = 1;
5767 streams_need_metadata++;
5768 }
5769 }
5770
5771 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5772 uint32_t j = 0;
5773 for (j = 0; j < streamsArray.num_streams; j++) {
5774 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005775 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5776 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5777 else
5778 streamsArray.stream_request[j].buf_index = indexUsed;
5779 break;
5780 }
5781 }
5782 if (j == streamsArray.num_streams) {
5783 LOGE("Did not find matching stream to update index");
5784 assert(0);
5785 }
5786
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005787 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005788 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005789 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005790 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005791 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005792 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005793 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005794
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005795 //If 2 streams have need_metadata set to true, fail the request, unless
5796 //we copy/reference count the metadata buffer
5797 if (streams_need_metadata > 1) {
        5798            LOGE("not supporting request in which two streams require"
        5799                    " 2 HAL metadata buffers for reprocessing");
5800 pthread_mutex_unlock(&mMutex);
5801 return -EINVAL;
5802 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005803
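    // Enable PDAF data in the backend only when this request includes a depth
    // blob output.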
Emilian Peev7650c122017-01-19 08:24:33 -08005804 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5806 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5807 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5808 pthread_mutex_unlock(&mMutex);
5809 return BAD_VALUE;
5810 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005811 if (request->input_buffer == NULL) {
5812 /* Set the parameters to backend:
5813 * - For every request in NORMAL MODE
5814 * - For every request in HFR mode during preview only case
5815 * - Once every batch in HFR mode during video recording
5816 */
5817 if (!mBatchSize ||
5818 (mBatchSize && !isVidBufRequested) ||
5819 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5820 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5821 mBatchSize, isVidBufRequested,
5822 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005823
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005824 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
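                // Merge this request's streams into the accumulated batched
                // stream list (skipping duplicates) so the upcoming set_parms
                // covers every stream queued during the batch.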
5825 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5826 uint32_t m = 0;
5827 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5828 if (streamsArray.stream_request[k].streamID ==
5829 mBatchedStreamsArray.stream_request[m].streamID)
5830 break;
5831 }
5832 if (m == mBatchedStreamsArray.num_streams) {
5833 mBatchedStreamsArray.stream_request\
5834 [mBatchedStreamsArray.num_streams].streamID =
5835 streamsArray.stream_request[k].streamID;
5836 mBatchedStreamsArray.stream_request\
5837 [mBatchedStreamsArray.num_streams].buf_index =
5838 streamsArray.stream_request[k].buf_index;
5839 mBatchedStreamsArray.num_streams =
5840 mBatchedStreamsArray.num_streams + 1;
5841 }
5842 }
5843 streamsArray = mBatchedStreamsArray;
5844 }
5845 /* Update stream id of all the requested buffers */
5846 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5847 streamsArray)) {
5848 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005849 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005850 return BAD_VALUE;
5851 }
5852
5853 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5854 mParameters);
5855 if (rc < 0) {
5856 LOGE("set_parms failed");
5857 }
        5858            /* reset to zero because the batch is queued */
5859 mToBeQueuedVidBufs = 0;
5860 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5861 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5862 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
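            // Batch not yet complete: just accumulate this request's streams in
            // mBatchedStreamsArray; set_parms will be sent once the batch fills.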
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005863 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5864 uint32_t m = 0;
5865 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5866 if (streamsArray.stream_request[k].streamID ==
5867 mBatchedStreamsArray.stream_request[m].streamID)
5868 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005869 }
5870 if (m == mBatchedStreamsArray.num_streams) {
5871 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5872 streamID = streamsArray.stream_request[k].streamID;
5873 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5874 buf_index = streamsArray.stream_request[k].buf_index;
5875 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5876 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005877 }
5878 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005879 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005880
5881 // Start all streams after the first setting is sent, so that the
5882 // setting can be applied sooner: (0 + apply_delay)th frame.
5883 if (mState == CONFIGURED && mChannelHandle) {
5884 //Then start them.
5885 LOGH("Start META Channel");
5886 rc = mMetadataChannel->start();
5887 if (rc < 0) {
5888 LOGE("META channel start failed");
5889 pthread_mutex_unlock(&mMutex);
5890 return rc;
5891 }
5892
5893 if (mAnalysisChannel) {
5894 rc = mAnalysisChannel->start();
5895 if (rc < 0) {
5896 LOGE("Analysis channel start failed");
5897 mMetadataChannel->stop();
5898 pthread_mutex_unlock(&mMutex);
5899 return rc;
5900 }
5901 }
5902
5903 if (mSupportChannel) {
5904 rc = mSupportChannel->start();
5905 if (rc < 0) {
5906 LOGE("Support channel start failed");
5907 mMetadataChannel->stop();
        5908                        /* Although support and analysis are mutually exclusive today,
        5909                           stop analysis here in any case for future proofing */
5910 if (mAnalysisChannel) {
5911 mAnalysisChannel->stop();
5912 }
5913 pthread_mutex_unlock(&mMutex);
5914 return rc;
5915 }
5916 }
5917 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5918 it != mStreamInfo.end(); it++) {
5919 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5920 LOGH("Start Processing Channel mask=%d",
5921 channel->getStreamTypeMask());
5922 rc = channel->start();
5923 if (rc < 0) {
5924 LOGE("channel start failed");
5925 pthread_mutex_unlock(&mMutex);
5926 return rc;
5927 }
5928 }
5929
5930 if (mRawDumpChannel) {
5931 LOGD("Starting raw dump stream");
5932 rc = mRawDumpChannel->start();
5933 if (rc != NO_ERROR) {
5934 LOGE("Error Starting Raw Dump Channel");
5935 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5936 it != mStreamInfo.end(); it++) {
5937 QCamera3Channel *channel =
5938 (QCamera3Channel *)(*it)->stream->priv;
5939 LOGH("Stopping Processing Channel mask=%d",
5940 channel->getStreamTypeMask());
5941 channel->stop();
5942 }
5943 if (mSupportChannel)
5944 mSupportChannel->stop();
5945 if (mAnalysisChannel) {
5946 mAnalysisChannel->stop();
5947 }
5948 mMetadataChannel->stop();
5949 pthread_mutex_unlock(&mMutex);
5950 return rc;
5951 }
5952 }
5953
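            // Stream-on is done in stages here: start_channel() with sensor
            // streaming disabled, then Easel MIPI configuration, and finally
            // start_sensor_streaming().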
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005954 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005955 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005956 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005957 if (rc != NO_ERROR) {
5958 LOGE("start_channel failed %d", rc);
5959 pthread_mutex_unlock(&mMutex);
5960 return rc;
5961 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005962
5963 {
5964 // Configure Easel for stream on.
5965 Mutex::Autolock l(gHdrPlusClientLock);
5966 if (EaselManagerClientOpened) {
5967 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5968 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5969 if (rc != OK) {
5970 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5971 mCameraId, mSensorModeInfo.op_pixel_clk);
5972 pthread_mutex_unlock(&mMutex);
5973 return rc;
5974 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07005975 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005976 }
5977 }
5978
5979 // Start sensor streaming.
5980 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5981 mChannelHandle);
5982 if (rc != NO_ERROR) {
5983 LOGE("start_sensor_stream_on failed %d", rc);
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005987 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005988 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005989 }
5990
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005991 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5992 {
5993 Mutex::Autolock l(gHdrPlusClientLock);
5994 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5995 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5996 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5997 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5998 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5999 rc = enableHdrPlusModeLocked();
6000 if (rc != OK) {
6001 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6002 pthread_mutex_unlock(&mMutex);
6003 return rc;
6004 }
6005
6006 mFirstPreviewIntentSeen = true;
6007 }
6008 }
6009
Thierry Strudel3d639192016-09-09 11:52:26 -07006010 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6011
6012 mState = STARTED;
6013 // Added a timed condition wait
6014 struct timespec ts;
6015 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006016 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006017 if (rc < 0) {
6018 isValidTimeout = 0;
        6019        LOGE("Error reading the monotonic clock!!");
6020 }
6021 else {
        6022        // Set the timeout to 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006023 int64_t timeout = 5;
6024 {
6025 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6026 // If there is a pending HDR+ request, the following requests may be blocked until the
6027 // HDR+ request is done. So allow a longer timeout.
6028 if (mHdrPlusPendingRequests.size() > 0) {
6029 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6030 }
6031 }
6032 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006033 }
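    // Throttle the framework: block until the number of in-flight live requests
    // drops below mMinInFlightRequests. Reprocess requests (pInputBuffer) are
    // never blocked here.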
6034 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006035 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 (mState != ERROR) && (mState != DEINIT)) {
6037 if (!isValidTimeout) {
6038 LOGD("Blocking on conditional wait");
6039 pthread_cond_wait(&mRequestCond, &mMutex);
6040 }
6041 else {
6042 LOGD("Blocking on timed conditional wait");
6043 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6044 if (rc == ETIMEDOUT) {
6045 rc = -ENODEV;
6046 LOGE("Unblocked on timeout!!!!");
6047 break;
6048 }
6049 }
6050 LOGD("Unblocked");
6051 if (mWokenUpByDaemon) {
6052 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006053 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006054 break;
6055 }
6056 }
6057 pthread_mutex_unlock(&mMutex);
6058
6059 return rc;
6060}
6061
6062/*===========================================================================
6063 * FUNCTION : dump
6064 *
        6065 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and pending
        6066 *              frame drops) to the given file descriptor; also used as a
        6067 *              trigger to refresh the debug log level via dumpsys.
        6068 * PARAMETERS :
        6069 *   @fd : file descriptor to write the dump into
        6070 * RETURN     : None
6071 *==========================================================================*/
6072void QCamera3HardwareInterface::dump(int fd)
6073{
6074 pthread_mutex_lock(&mMutex);
6075 dprintf(fd, "\n Camera HAL3 information Begin \n");
6076
6077 dprintf(fd, "\nNumber of pending requests: %zu \n",
6078 mPendingRequestsList.size());
6079 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6080 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6081 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6082 for(pendingRequestIterator i = mPendingRequestsList.begin();
6083 i != mPendingRequestsList.end(); i++) {
6084 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6085 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6086 i->input_buffer);
6087 }
6088 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6089 mPendingBuffersMap.get_num_overall_buffers());
6090 dprintf(fd, "-------+------------------\n");
6091 dprintf(fd, " Frame | Stream type mask \n");
6092 dprintf(fd, "-------+------------------\n");
6093 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6094 for(auto &j : req.mPendingBufferList) {
6095 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6096 dprintf(fd, " %5d | %11d \n",
6097 req.frame_number, channel->getStreamTypeMask());
6098 }
6099 }
6100 dprintf(fd, "-------+------------------\n");
6101
6102 dprintf(fd, "\nPending frame drop list: %zu\n",
6103 mPendingFrameDropList.size());
6104 dprintf(fd, "-------+-----------\n");
6105 dprintf(fd, " Frame | Stream ID \n");
6106 dprintf(fd, "-------+-----------\n");
6107 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6108 i != mPendingFrameDropList.end(); i++) {
6109 dprintf(fd, " %5d | %9d \n",
6110 i->frame_number, i->stream_ID);
6111 }
6112 dprintf(fd, "-------+-----------\n");
6113
6114 dprintf(fd, "\n Camera HAL3 information End \n");
6115
6116 /* use dumpsys media.camera as trigger to send update debug level event */
6117 mUpdateDebugLevel = true;
6118 pthread_mutex_unlock(&mMutex);
6119 return;
6120}
6121
6122/*===========================================================================
6123 * FUNCTION : flush
6124 *
6125 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6126 * conditionally restarts channels
6127 *
6128 * PARAMETERS :
6129 * @ restartChannels: re-start all channels
6130 *
6131 *
6132 * RETURN :
6133 * 0 on success
6134 * Error code on failure
6135 *==========================================================================*/
6136int QCamera3HardwareInterface::flush(bool restartChannels)
6137{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006138 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006139 int32_t rc = NO_ERROR;
6140
6141 LOGD("Unblocking Process Capture Request");
6142 pthread_mutex_lock(&mMutex);
6143 mFlush = true;
6144 pthread_mutex_unlock(&mMutex);
6145
6146 rc = stopAllChannels();
6147 // unlink of dualcam
6148 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006149 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6150 &m_pDualCamCmdPtr->bundle_info;
6151 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6153 pthread_mutex_lock(&gCamLock);
6154
6155 if (mIsMainCamera == 1) {
6156 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6157 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006158 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006159 // related session id should be session id of linked session
6160 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6161 } else {
6162 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6163 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006164 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006165 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6166 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006167 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 pthread_mutex_unlock(&gCamLock);
6169
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006170 rc = mCameraHandle->ops->set_dual_cam_cmd(
6171 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006172 if (rc < 0) {
6173 LOGE("Dualcam: Unlink failed, but still proceed to close");
6174 }
6175 }
6176
6177 if (rc < 0) {
6178 LOGE("stopAllChannels failed");
6179 return rc;
6180 }
6181 if (mChannelHandle) {
6182 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6183 mChannelHandle);
6184 }
6185
6186 // Reset bundle info
6187 rc = setBundleInfo();
6188 if (rc < 0) {
6189 LOGE("setBundleInfo failed %d", rc);
6190 return rc;
6191 }
6192
6193 // Mutex Lock
6194 pthread_mutex_lock(&mMutex);
6195
6196 // Unblock process_capture_request
6197 mPendingLiveRequest = 0;
6198 pthread_cond_signal(&mRequestCond);
6199
6200 rc = notifyErrorForPendingRequests();
6201 if (rc < 0) {
6202 LOGE("notifyErrorForPendingRequests failed");
6203 pthread_mutex_unlock(&mMutex);
6204 return rc;
6205 }
6206
6207 mFlush = false;
6208
6209 // Start the Streams/Channels
6210 if (restartChannels) {
6211 rc = startAllChannels();
6212 if (rc < 0) {
6213 LOGE("startAllChannels failed");
6214 pthread_mutex_unlock(&mMutex);
6215 return rc;
6216 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006217 if (mChannelHandle) {
6218 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006219 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006220 if (rc < 0) {
6221 LOGE("start_channel failed");
6222 pthread_mutex_unlock(&mMutex);
6223 return rc;
6224 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006225 }
6226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006227 pthread_mutex_unlock(&mMutex);
6228
6229 return 0;
6230}
6231
6232/*===========================================================================
6233 * FUNCTION : flushPerf
6234 *
6235 * DESCRIPTION: This is the performance optimization version of flush that does
        6236 *              not use stream off; rather, it flushes the system
6237 *
6238 * PARAMETERS :
6239 *
6240 *
6241 * RETURN : 0 : success
6242 * -EINVAL: input is malformed (device is not valid)
6243 * -ENODEV: if the device has encountered a serious error
6244 *==========================================================================*/
6245int QCamera3HardwareInterface::flushPerf()
6246{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006247 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006248 int32_t rc = 0;
6249 struct timespec timeout;
6250 bool timed_wait = false;
6251
6252 pthread_mutex_lock(&mMutex);
6253 mFlushPerf = true;
6254 mPendingBuffersMap.numPendingBufsAtFlush =
6255 mPendingBuffersMap.get_num_overall_buffers();
6256 LOGD("Calling flush. Wait for %d buffers to return",
6257 mPendingBuffersMap.numPendingBufsAtFlush);
6258
6259 /* send the flush event to the backend */
6260 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6261 if (rc < 0) {
6262 LOGE("Error in flush: IOCTL failure");
6263 mFlushPerf = false;
6264 pthread_mutex_unlock(&mMutex);
6265 return -ENODEV;
6266 }
6267
6268 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6269 LOGD("No pending buffers in HAL, return flush");
6270 mFlushPerf = false;
6271 pthread_mutex_unlock(&mMutex);
6272 return rc;
6273 }
6274
6275 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006276 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006277 if (rc < 0) {
        6278        LOGE("Error reading the monotonic clock, cannot use timed wait");
6279 } else {
6280 timeout.tv_sec += FLUSH_TIMEOUT;
6281 timed_wait = true;
6282 }
6283
6284 //Block on conditional variable
6285 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6286 LOGD("Waiting on mBuffersCond");
6287 if (!timed_wait) {
6288 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6289 if (rc != 0) {
6290 LOGE("pthread_cond_wait failed due to rc = %s",
6291 strerror(rc));
6292 break;
6293 }
6294 } else {
6295 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6296 if (rc != 0) {
6297 LOGE("pthread_cond_timedwait failed due to rc = %s",
6298 strerror(rc));
6299 break;
6300 }
6301 }
6302 }
6303 if (rc != 0) {
6304 mFlushPerf = false;
6305 pthread_mutex_unlock(&mMutex);
6306 return -ENODEV;
6307 }
6308
6309 LOGD("Received buffers, now safe to return them");
6310
6311 //make sure the channels handle flush
6312 //currently only required for the picture channel to release snapshot resources
6313 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6314 it != mStreamInfo.end(); it++) {
6315 QCamera3Channel *channel = (*it)->channel;
6316 if (channel) {
6317 rc = channel->flush();
6318 if (rc) {
6319 LOGE("Flushing the channels failed with error %d", rc);
        6320                // Even though the channel flush failed, we need to continue and
        6321                // return the buffers we have to the framework; however, the
        6322                // return value will be an error.
6323 rc = -ENODEV;
6324 }
6325 }
6326 }
6327
6328 /* notify the frameworks and send errored results */
6329 rc = notifyErrorForPendingRequests();
6330 if (rc < 0) {
6331 LOGE("notifyErrorForPendingRequests failed");
6332 pthread_mutex_unlock(&mMutex);
6333 return rc;
6334 }
6335
6336 //unblock process_capture_request
6337 mPendingLiveRequest = 0;
6338 unblockRequestIfNecessary();
6339
6340 mFlushPerf = false;
6341 pthread_mutex_unlock(&mMutex);
6342 LOGD ("Flush Operation complete. rc = %d", rc);
6343 return rc;
6344}
6345
6346/*===========================================================================
6347 * FUNCTION : handleCameraDeviceError
6348 *
6349 * DESCRIPTION: This function calls internal flush and notifies the error to
6350 * framework and updates the state variable.
6351 *
6352 * PARAMETERS : None
6353 *
6354 * RETURN : NO_ERROR on Success
6355 * Error code on failure
6356 *==========================================================================*/
6357int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6358{
6359 int32_t rc = NO_ERROR;
6360
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006361 {
6362 Mutex::Autolock lock(mFlushLock);
6363 pthread_mutex_lock(&mMutex);
6364 if (mState != ERROR) {
6365 //if mState != ERROR, nothing to be done
6366 pthread_mutex_unlock(&mMutex);
6367 return NO_ERROR;
6368 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006369 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006370
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006371 rc = flush(false /* restart channels */);
6372 if (NO_ERROR != rc) {
6373 LOGE("internal flush to handle mState = ERROR failed");
6374 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006375
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006376 pthread_mutex_lock(&mMutex);
6377 mState = DEINIT;
6378 pthread_mutex_unlock(&mMutex);
6379 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006380
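    // Notify the framework that the device hit a fatal error
    // (CAMERA3_MSG_ERROR_DEVICE).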
6381 camera3_notify_msg_t notify_msg;
6382 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6383 notify_msg.type = CAMERA3_MSG_ERROR;
6384 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6385 notify_msg.message.error.error_stream = NULL;
6386 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006387 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006388
6389 return rc;
6390}
6391
6392/*===========================================================================
6393 * FUNCTION : captureResultCb
6394 *
6395 * DESCRIPTION: Callback handler for all capture result
6396 * (streams, as well as metadata)
6397 *
6398 * PARAMETERS :
6399 * @metadata : metadata information
6400 * @buffer : actual gralloc buffer to be returned to frameworks.
6401 * NULL if metadata.
6402 *
6403 * RETURN : NONE
6404 *==========================================================================*/
6405void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6406 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6407{
6408 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006409 pthread_mutex_lock(&mMutex);
6410 uint8_t batchSize = mBatchSize;
6411 pthread_mutex_unlock(&mMutex);
6412 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006413 handleBatchMetadata(metadata_buf,
6414 true /* free_and_bufdone_meta_buf */);
6415 } else { /* mBatchSize = 0 */
6416 hdrPlusPerfLock(metadata_buf);
6417 pthread_mutex_lock(&mMutex);
6418 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006419 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006420 true /* last urgent frame of batch metadata */,
6421 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006422 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006423 pthread_mutex_unlock(&mMutex);
6424 }
6425 } else if (isInputBuffer) {
6426 pthread_mutex_lock(&mMutex);
6427 handleInputBufferWithLock(frame_number);
6428 pthread_mutex_unlock(&mMutex);
6429 } else {
6430 pthread_mutex_lock(&mMutex);
6431 handleBufferWithLock(buffer, frame_number);
6432 pthread_mutex_unlock(&mMutex);
6433 }
6434 return;
6435}
6436
6437/*===========================================================================
6438 * FUNCTION : getReprocessibleOutputStreamId
6439 *
6440 * DESCRIPTION: Get source output stream id for the input reprocess stream
6441 * based on size and format, which would be the largest
6442 * output stream if an input stream exists.
6443 *
6444 * PARAMETERS :
6445 * @id : return the stream id if found
6446 *
6447 * RETURN : int32_t type of status
6448 * NO_ERROR -- success
        6449 *              non-zero failure code
6450 *==========================================================================*/
6451int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6452{
        6453    /* check if there is any output or bidirectional stream with the same size
        6454       and format, and return that stream */
6455 if ((mInputStreamInfo.dim.width > 0) &&
6456 (mInputStreamInfo.dim.height > 0)) {
6457 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6458 it != mStreamInfo.end(); it++) {
6459
6460 camera3_stream_t *stream = (*it)->stream;
6461 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6462 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6463 (stream->format == mInputStreamInfo.format)) {
6464 // Usage flag for an input stream and the source output stream
6465 // may be different.
6466 LOGD("Found reprocessible output stream! %p", *it);
6467 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6468 stream->usage, mInputStreamInfo.usage);
6469
6470 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6471 if (channel != NULL && channel->mStreams[0]) {
6472 id = channel->mStreams[0]->getMyServerID();
6473 return NO_ERROR;
6474 }
6475 }
6476 }
6477 } else {
6478 LOGD("No input stream, so no reprocessible output stream");
6479 }
6480 return NAME_NOT_FOUND;
6481}
6482
6483/*===========================================================================
6484 * FUNCTION : lookupFwkName
6485 *
        6486 * DESCRIPTION: In case the enum is not the same in the framework and backend,
        6487 *              make sure the parameter is correctly propagated
6488 *
6489 * PARAMETERS :
6490 * @arr : map between the two enums
        6491 *   @len      : length of the map
6492 * @hal_name : name of the hal_parm to map
6493 *
6494 * RETURN : int type of status
6495 * fwk_name -- success
        6496 *              non-zero failure code
6497 *==========================================================================*/
6498template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6499 size_t len, halType hal_name)
6500{
6501
6502 for (size_t i = 0; i < len; i++) {
6503 if (arr[i].hal_name == hal_name) {
6504 return arr[i].fwk_name;
6505 }
6506 }
6507
        6508    /* Not being able to find a matching framework type is not necessarily
        6509     * an error case. This happens when mm-camera supports more attributes
        6510     * than the framework does */
6511 LOGH("Cannot find matching framework type");
6512 return NAME_NOT_FOUND;
6513}
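
// Illustrative usage of lookupFwkName (a sketch only; COLOR_ABERRATION_MAP and
// METADATA_MAP_SIZE are assumed to be the mapping table and helper macro
// defined elsewhere in this HAL, and halCacMode is a hypothetical local):
//   int fwkCacMode = lookupFwkName(COLOR_ABERRATION_MAP,
//           METADATA_MAP_SIZE(COLOR_ABERRATION_MAP), halCacMode);
//   if (fwkCacMode != NAME_NOT_FOUND) { /* use the framework enum value */ }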
6514
6515/*===========================================================================
6516 * FUNCTION : lookupHalName
6517 *
        6518 * DESCRIPTION: In case the enum is not the same in the framework and backend,
        6519 *              make sure the parameter is correctly propagated
6520 *
6521 * PARAMETERS :
6522 * @arr : map between the two enums
        6523 *   @len      : length of the map
        6524 *   @fwk_name : name of the framework parameter to map
6525 *
6526 * RETURN : int32_t type of status
6527 * hal_name -- success
        6528 *              non-zero failure code
6529 *==========================================================================*/
6530template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6531 size_t len, fwkType fwk_name)
6532{
6533 for (size_t i = 0; i < len; i++) {
6534 if (arr[i].fwk_name == fwk_name) {
6535 return arr[i].hal_name;
6536 }
6537 }
6538
6539 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6540 return NAME_NOT_FOUND;
6541}
6542
6543/*===========================================================================
6544 * FUNCTION : lookupProp
6545 *
6546 * DESCRIPTION: lookup a value by its name
6547 *
6548 * PARAMETERS :
6549 * @arr : map between the two enums
6550 * @len : size of the map
6551 * @name : name to be looked up
6552 *
6553 * RETURN : Value if found
6554 * CAM_CDS_MODE_MAX if not found
6555 *==========================================================================*/
6556template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6557 size_t len, const char *name)
6558{
6559 if (name) {
6560 for (size_t i = 0; i < len; i++) {
6561 if (!strcmp(arr[i].desc, name)) {
6562 return arr[i].val;
6563 }
6564 }
6565 }
6566 return CAM_CDS_MODE_MAX;
6567}
6568
6569/*===========================================================================
        6570 * FUNCTION   : translateFromHalMetadata
        6571 * DESCRIPTION: Translate metadata received from the camera backend into a
        6572 *              camera_metadata_t result in the format expected by the framework
6573 * PARAMETERS :
6574 * @metadata : metadata information from callback
6575 * @timestamp: metadata buffer timestamp
6576 * @request_id: request id
6577 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006578 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006579 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6580 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006581 * @pprocDone: whether internal offline postprocsesing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006582 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6583 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006584 *
6585 * RETURN : camera_metadata_t*
6586 * metadata in a format specified by fwk
6587 *==========================================================================*/
6588camera_metadata_t*
6589QCamera3HardwareInterface::translateFromHalMetadata(
6590 metadata_buffer_t *metadata,
6591 nsecs_t timestamp,
6592 int32_t request_id,
6593 const CameraMetadata& jpegMetadata,
6594 uint8_t pipeline_depth,
6595 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006596 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006597 /* DevCamDebug metadata translateFromHalMetadata argument */
6598 uint8_t DevCamDebug_meta_enable,
6599 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006600 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006601 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006602 bool lastMetadataInBatch,
6603 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006604{
6605 CameraMetadata camMetadata;
6606 camera_metadata_t *resultMetadata;
6607
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006608 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006609 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6610 * Timestamp is needed because it's used for shutter notify calculation.
6611 * */
6612 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6613 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006614 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006615 }
6616
Thierry Strudel3d639192016-09-09 11:52:26 -07006617 if (jpegMetadata.entryCount())
6618 camMetadata.append(jpegMetadata);
6619
6620 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6621 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6622 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6623 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006624 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006625 if (mBatchSize == 0) {
6626 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6627 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6628 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006629
Samuel Ha68ba5172016-12-15 18:41:12 -08006630 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6631 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6632 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6633 // DevCamDebug metadata translateFromHalMetadata AF
6634 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6635 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6636 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6637 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6638 }
6639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6640 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6641 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6642 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6643 }
6644 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6645 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6646 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6647 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6648 }
6649 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6650 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6651 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6652 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6653 }
6654 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6655 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6656 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6657 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6658 }
6659 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6660 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6661 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6662 *DevCamDebug_af_monitor_pdaf_target_pos;
6663 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6664 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6665 }
6666 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6667 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6668 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6669 *DevCamDebug_af_monitor_pdaf_confidence;
6670 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6671 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6672 }
6673 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6674 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6675 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6676 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6677 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6678 }
6679 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6680 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6681 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6682 *DevCamDebug_af_monitor_tof_target_pos;
6683 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6684 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6687 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6688 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6689 *DevCamDebug_af_monitor_tof_confidence;
6690 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6691 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6694 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6695 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6696 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6697 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6698 }
6699 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6700 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6701 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6702 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6703 &fwk_DevCamDebug_af_monitor_type_select, 1);
6704 }
6705 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6706 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6707 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6708 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6709 &fwk_DevCamDebug_af_monitor_refocus, 1);
6710 }
6711 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6712 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6713 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6714 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6715 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6716 }
6717 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6718 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6719 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6720 *DevCamDebug_af_search_pdaf_target_pos;
6721 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6722 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6725 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6726 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6727 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6728 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6731 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6732 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6733 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6734 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6735 }
6736 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6737 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6738 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6739 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6740 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6741 }
6742 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6743 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6744 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6745 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6746 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6747 }
6748 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6749 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6750 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6751 *DevCamDebug_af_search_tof_target_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6753 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6756 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6757 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6758 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6759 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6762 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6763 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6765 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6768 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6769 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6770 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6771 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6774 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6775 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6776 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6777 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6778 }
6779 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6780 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6781 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6782 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6783 &fwk_DevCamDebug_af_search_type_select, 1);
6784 }
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6786 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6787 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6789 &fwk_DevCamDebug_af_search_next_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6792 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6793 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6794 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6795 &fwk_DevCamDebug_af_search_target_pos, 1);
6796 }
6797 // DevCamDebug metadata translateFromHalMetadata AEC
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6799 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6800 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6801 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6802 }
6803 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6804 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6805 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6806 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6807 }
6808 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6809 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6810 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6811 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6812 }
6813 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6814 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6815 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6816 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6817 }
6818 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6819 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6820 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6821 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6822 }
6823 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6824 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6825 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6826 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6827 }
6828 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6829 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6830 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6831 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6832 }
6833 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6834 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6835 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6836 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6837 }
Samuel Ha34229982017-02-17 13:51:11 -08006838 // DevCamDebug metadata translateFromHalMetadata zzHDR
6839 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6840 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6841 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6842 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6845 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006846 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006847 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6848 }
6849 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6850 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6851 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6852 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6853 }
6854 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6855 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006856 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006857 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6858 }
6859 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6860 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6861 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6862 *DevCamDebug_aec_hdr_sensitivity_ratio;
6863 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6864 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6865 }
6866 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6867 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6868 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6869 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6870 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6871 }
6872 // DevCamDebug metadata translateFromHalMetadata ADRC
6873 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6874 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6875 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6876 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6877 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6878 }
6879 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6880 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6881 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6882 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6883 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6884 }
6885 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6886 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6887 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6888 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6889 }
6890 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6891 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6892 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6893 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6894 }
6895 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6896 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6897 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6898 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6899 }
6900 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6901 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6902 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6903 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6904 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006905 // DevCamDebug metadata translateFromHalMetadata AWB
6906 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6907 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6908 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6909 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6910 }
6911 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6912 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6913 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6914 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6915 }
6916 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6917 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6918 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6919 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6920 }
6921 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6922 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6923 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6924 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6925 }
6926 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6927 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6928 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6929 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6930 }
6931 }
6932 // atrace_end(ATRACE_TAG_ALWAYS);
6933
Thierry Strudel3d639192016-09-09 11:52:26 -07006934 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6935 int64_t fwk_frame_number = *frame_number;
6936 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6937 }
6938
6939 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6940 int32_t fps_range[2];
6941 fps_range[0] = (int32_t)float_range->min_fps;
6942 fps_range[1] = (int32_t)float_range->max_fps;
6943 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6944 fps_range, 2);
6945 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6946 fps_range[0], fps_range[1]);
6947 }
6948
6949 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6950 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6951 }
6952
6953 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6954 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6955 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6956 *sceneMode);
6957 if (NAME_NOT_FOUND != val) {
6958 uint8_t fwkSceneMode = (uint8_t)val;
6959 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6960 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6961 fwkSceneMode);
6962 }
6963 }
6964
6965 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6966 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6967 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6968 }
6969
6970 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6971 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6972 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6973 }
6974
6975 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6976 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6977 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6978 }
6979
6980 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6981 CAM_INTF_META_EDGE_MODE, metadata) {
6982 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6983 }
6984
6985 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6986 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6987 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6988 }
6989
6990 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6991 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6992 }
6993
6994 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6995 if (0 <= *flashState) {
6996 uint8_t fwk_flashState = (uint8_t) *flashState;
6997 if (!gCamCapability[mCameraId]->flash_available) {
6998 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6999 }
7000 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7001 }
7002 }
7003
7004 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7005 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7006 if (NAME_NOT_FOUND != val) {
7007 uint8_t fwk_flashMode = (uint8_t)val;
7008 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7009 }
7010 }
7011
7012 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7013 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7014 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7015 }
7016
7017 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7018 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7019 }
7020
7021 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7022 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7023 }
7024
7025 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7026 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7027 }
7028
7029 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7030 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7031 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7032 }
7033
7034 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7035 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7036 LOGD("fwk_videoStab = %d", fwk_videoStab);
7037 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7038 } else {
                                                7039        // Regardless of whether video stabilization is supported, CTS expects the EIS result
                                                7040        // to be non-NULL, so hardcode the video stabilization result to OFF mode.
7041 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7042 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007043 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007044 }
7045
7046 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7047 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7048 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7049 }
7050
7051 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7052 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7053 }
7054
Thierry Strudel3d639192016-09-09 11:52:26 -07007055 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7056 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007057 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007058
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007059 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7060 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007061
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007062        LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007063 blackLevelAppliedPattern->cam_black_level[0],
7064 blackLevelAppliedPattern->cam_black_level[1],
7065 blackLevelAppliedPattern->cam_black_level[2],
7066 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007067 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7068 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007069
7070#ifndef USE_HAL_3_3
7071 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307072        // Need to convert the black level from the internal 14-bit depth to the
Zhijun Heb753c672016-06-15 14:50:48 -07007073        // sensor's 10-bit raw depth space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307074 fwk_blackLevelInd[0] /= 16.0;
7075 fwk_blackLevelInd[1] /= 16.0;
7076 fwk_blackLevelInd[2] /= 16.0;
7077 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007078 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7079 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007080#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007081 }
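    // The applied black level is published twice: the CFA-adjusted values go out
    // unscaled through the QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN vendor tag,
    // while (when not building for HAL 3.3) the scaled copy above feeds
    // ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL in the sensor's 10-bit RAW scale, e.g. an
    // internal value of 256 is reported as 16.0.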
7082
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007083#ifndef USE_HAL_3_3
7084 // Fixed whitelevel is used by ISP/Sensor
7085 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7086 &gCamCapability[mCameraId]->white_level, 1);
7087#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007088
7089 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7090 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7091 int32_t scalerCropRegion[4];
7092 scalerCropRegion[0] = hScalerCropRegion->left;
7093 scalerCropRegion[1] = hScalerCropRegion->top;
7094 scalerCropRegion[2] = hScalerCropRegion->width;
7095 scalerCropRegion[3] = hScalerCropRegion->height;
7096
7097 // Adjust crop region from sensor output coordinate system to active
7098 // array coordinate system.
7099 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7100 scalerCropRegion[2], scalerCropRegion[3]);
7101
7102 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7103 }
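    // The HAL reports the crop region in sensor-output coordinates;
    // mCropRegionMapper.toActiveArray() remaps it into the active pixel array
    // coordinate system that ANDROID_SCALER_CROP_REGION is defined in. The exact
    // math lives in the crop region mapper, but it is essentially a rescale of
    // left/top/width/height from sensor-output dimensions to active-array
    // dimensions (e.g. a region reported on a 2x2-binned output would roughly
    // double in each dimension).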
7104
7105 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7106 LOGD("sensorExpTime = %lld", *sensorExpTime);
7107 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7108 }
7109
7110 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7111 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7112 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7113 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7114 }
7115
7116 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7117 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7118 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7119 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7120 sensorRollingShutterSkew, 1);
7121 }
7122
7123 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7124 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7125 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7126
7127 //calculate the noise profile based on sensitivity
7128 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7129 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7130 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7131 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7132 noise_profile[i] = noise_profile_S;
7133 noise_profile[i+1] = noise_profile_O;
7134 }
7135 LOGD("noise model entry (S, O) is (%f, %f)",
7136 noise_profile_S, noise_profile_O);
7137 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7138 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7139 }
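    // ANDROID_SENSOR_NOISE_PROFILE expects one (S, O) pair per color channel, so
    // the single pair derived from the current sensitivity is replicated across
    // all channels, i.e. the array is laid out as [S, O, S, O, ...] with
    // 2 * num_color_channels entries (8 values for a typical 4-channel Bayer
    // sensor).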
7140
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007141#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007142 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007143 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007144 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007145 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007146 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7147 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7148 }
7149 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007150#endif
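    // Post-RAW sensitivity boost: the value defaults to 100 (no boost), is replaced
    // by the ISP digital sensitivity when reported, and is then scaled by the
    // post-stats sensitivity factor. For example, an ISP sensitivity of 200 with a
    // post-stats factor of 1.5 is reported as 300.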
7151
Thierry Strudel3d639192016-09-09 11:52:26 -07007152 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7153 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7154 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7155 }
7156
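    // Face detection results: when FD is enabled, the face rectangles (and, in FULL
    // mode, the eye/mouth landmarks) are remapped from sensor-output to active-array
    // coordinates and published through the standard ANDROID_STATISTICS_FACE_* tags;
    // blink, smile and gaze attributes go out through QCAMERA3_STATS_* vendor tags.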
7157 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7158 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7159 *faceDetectMode);
7160 if (NAME_NOT_FOUND != val) {
7161 uint8_t fwk_faceDetectMode = (uint8_t)val;
7162 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7163
7164 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7165 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7166 CAM_INTF_META_FACE_DETECTION, metadata) {
7167 uint8_t numFaces = MIN(
7168 faceDetectionInfo->num_faces_detected, MAX_ROI);
7169 int32_t faceIds[MAX_ROI];
7170 uint8_t faceScores[MAX_ROI];
7171 int32_t faceRectangles[MAX_ROI * 4];
7172 int32_t faceLandmarks[MAX_ROI * 6];
7173 size_t j = 0, k = 0;
7174
7175 for (size_t i = 0; i < numFaces; i++) {
7176 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7177 // Adjust crop region from sensor output coordinate system to active
7178 // array coordinate system.
7179 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7180 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7181 rect.width, rect.height);
7182
7183 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7184 faceRectangles+j, -1);
7185
Jason Lee8ce36fa2017-04-19 19:40:37 -07007186 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7187 "bottom-right (%d, %d)",
7188 faceDetectionInfo->frame_id, i,
7189 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7190 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7191
Thierry Strudel3d639192016-09-09 11:52:26 -07007192 j+= 4;
7193 }
7194 if (numFaces <= 0) {
7195 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7196 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7197 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7198 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7199 }
7200
7201 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7202 numFaces);
7203 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7204 faceRectangles, numFaces * 4U);
7205 if (fwk_faceDetectMode ==
7206 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7207 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7208 CAM_INTF_META_FACE_LANDMARK, metadata) {
7209
7210 for (size_t i = 0; i < numFaces; i++) {
7211 // Map the co-ordinate sensor output coordinate system to active
7212 // array coordinate system.
7213 mCropRegionMapper.toActiveArray(
7214 landmarks->face_landmarks[i].left_eye_center.x,
7215 landmarks->face_landmarks[i].left_eye_center.y);
7216 mCropRegionMapper.toActiveArray(
7217 landmarks->face_landmarks[i].right_eye_center.x,
7218 landmarks->face_landmarks[i].right_eye_center.y);
7219 mCropRegionMapper.toActiveArray(
7220 landmarks->face_landmarks[i].mouth_center.x,
7221 landmarks->face_landmarks[i].mouth_center.y);
7222
7223 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007224
7225 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7226 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7227 faceDetectionInfo->frame_id, i,
7228 faceLandmarks[k + LEFT_EYE_X],
7229 faceLandmarks[k + LEFT_EYE_Y],
7230 faceLandmarks[k + RIGHT_EYE_X],
7231 faceLandmarks[k + RIGHT_EYE_Y],
7232 faceLandmarks[k + MOUTH_X],
7233 faceLandmarks[k + MOUTH_Y]);
7234
Thierry Strudel04e026f2016-10-10 11:27:36 -07007235 k+= TOTAL_LANDMARK_INDICES;
7236 }
7237 } else {
7238 for (size_t i = 0; i < numFaces; i++) {
7239 setInvalidLandmarks(faceLandmarks+k);
7240 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007241 }
7242 }
7243
Jason Lee49619db2017-04-13 12:07:22 -07007244 for (size_t i = 0; i < numFaces; i++) {
7245 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7246
7247 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7248 faceDetectionInfo->frame_id, i, faceIds[i]);
7249 }
7250
Thierry Strudel3d639192016-09-09 11:52:26 -07007251 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7252 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7253 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007254 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007255 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7256 CAM_INTF_META_FACE_BLINK, metadata) {
7257 uint8_t detected[MAX_ROI];
7258 uint8_t degree[MAX_ROI * 2];
7259 for (size_t i = 0; i < numFaces; i++) {
7260 detected[i] = blinks->blink[i].blink_detected;
7261 degree[2 * i] = blinks->blink[i].left_blink;
7262 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007263
Jason Lee49619db2017-04-13 12:07:22 -07007264 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7265 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7266 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7267 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007268 }
7269 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7270 detected, numFaces);
7271 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7272 degree, numFaces * 2);
7273 }
7274 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7275 CAM_INTF_META_FACE_SMILE, metadata) {
7276 uint8_t degree[MAX_ROI];
7277 uint8_t confidence[MAX_ROI];
7278 for (size_t i = 0; i < numFaces; i++) {
7279 degree[i] = smiles->smile[i].smile_degree;
7280 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007281
Jason Lee49619db2017-04-13 12:07:22 -07007282 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7283 "smile_degree=%d, smile_score=%d",
7284 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007285 }
7286 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7287 degree, numFaces);
7288 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7289 confidence, numFaces);
7290 }
7291 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7292 CAM_INTF_META_FACE_GAZE, metadata) {
7293 int8_t angle[MAX_ROI];
7294 int32_t direction[MAX_ROI * 3];
7295 int8_t degree[MAX_ROI * 2];
7296 for (size_t i = 0; i < numFaces; i++) {
7297 angle[i] = gazes->gaze[i].gaze_angle;
7298 direction[3 * i] = gazes->gaze[i].updown_dir;
7299 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7300 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7301 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7302 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007303
7304 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7305 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7306 "left_right_gaze=%d, top_bottom_gaze=%d",
7307 faceDetectionInfo->frame_id, i, angle[i],
7308 direction[3 * i], direction[3 * i + 1],
7309 direction[3 * i + 2],
7310 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007311 }
7312 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7313 (uint8_t *)angle, numFaces);
7314 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7315 direction, numFaces * 3);
7316 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7317 (uint8_t *)degree, numFaces * 2);
7318 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007319 }
7320 }
7321 }
7322 }
7323
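    // Histogram: the mode is mirrored to both the QCAMERA3 and the experimental
    // NEXUS tags, and the raw histogram buffer is only published when the mode is
    // ON and a non-zero bin count was reported. For Bayer statistics the buffer is
    // selected by channel; anything other than GR/GB/B falls back to the R channel.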
7324 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7325 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007326 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007327 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007328 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007329
Shuzhen Wang14415f52016-11-16 18:26:18 -08007330 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7331 histogramBins = *histBins;
7332 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7333 }
7334
7335 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007336 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7337 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007338 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007339
7340 switch (stats_data->type) {
7341 case CAM_HISTOGRAM_TYPE_BAYER:
7342 switch (stats_data->bayer_stats.data_type) {
7343 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007344 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7345 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007346 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7348 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007349 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007350 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7351 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007352 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007353 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007354 case CAM_STATS_CHANNEL_R:
7355 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007356 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7357 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007358 }
7359 break;
7360 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007361 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007362 break;
7363 }
7364
Shuzhen Wang14415f52016-11-16 18:26:18 -08007365 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007366 }
7367 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007368 }
7369
7370 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7371 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7372 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7373 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7374 }
7375
7376 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7377 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7378 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7379 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7380 }
7381
7382 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7383 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7384 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7385 CAM_MAX_SHADING_MAP_HEIGHT);
7386 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7387 CAM_MAX_SHADING_MAP_WIDTH);
7388 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7389 lensShadingMap->lens_shading, 4U * map_width * map_height);
7390 }
7391
7392 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7393 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7394 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7395 }
7396
7397 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7398 //Populate CAM_INTF_META_TONEMAP_CURVES
                                                7399        /* ch0 = G, ch1 = B, ch2 = R */
7400 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7401 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7402 tonemap->tonemap_points_cnt,
7403 CAM_MAX_TONEMAP_CURVE_SIZE);
7404 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7405 }
7406
7407 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7408 &tonemap->curves[0].tonemap_points[0][0],
7409 tonemap->tonemap_points_cnt * 2);
7410
7411 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7412 &tonemap->curves[1].tonemap_points[0][0],
7413 tonemap->tonemap_points_cnt * 2);
7414
7415 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7416 &tonemap->curves[2].tonemap_points[0][0],
7417 tonemap->tonemap_points_cnt * 2);
7418 }
7419
7420 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7421 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7422 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7423 CC_GAIN_MAX);
7424 }
7425
7426 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7427 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7428 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7429 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7430 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7431 }
7432
7433 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7434 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7435 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7436 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7437 toneCurve->tonemap_points_cnt,
7438 CAM_MAX_TONEMAP_CURVE_SIZE);
7439 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7440 }
7441 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7442 (float*)toneCurve->curve.tonemap_points,
7443 toneCurve->tonemap_points_cnt * 2);
7444 }
7445
7446 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7447 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7448 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7449 predColorCorrectionGains->gains, 4);
7450 }
7451
7452 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7453 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7454 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7455 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7456 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7457 }
7458
7459 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7460 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7461 }
7462
7463 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7464 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7465 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7466 }
7467
7468 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7469 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7470 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7471 }
7472
7473 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7474 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7475 *effectMode);
7476 if (NAME_NOT_FOUND != val) {
7477 uint8_t fwk_effectMode = (uint8_t)val;
7478 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7479 }
7480 }
7481
7482 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7483 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7484 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7485 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7486 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7487 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7488 }
7489 int32_t fwk_testPatternData[4];
7490 fwk_testPatternData[0] = testPatternData->r;
7491 fwk_testPatternData[3] = testPatternData->b;
7492 switch (gCamCapability[mCameraId]->color_arrangement) {
7493 case CAM_FILTER_ARRANGEMENT_RGGB:
7494 case CAM_FILTER_ARRANGEMENT_GRBG:
7495 fwk_testPatternData[1] = testPatternData->gr;
7496 fwk_testPatternData[2] = testPatternData->gb;
7497 break;
7498 case CAM_FILTER_ARRANGEMENT_GBRG:
7499 case CAM_FILTER_ARRANGEMENT_BGGR:
7500 fwk_testPatternData[2] = testPatternData->gr;
7501 fwk_testPatternData[1] = testPatternData->gb;
7502 break;
7503 default:
7504 LOGE("color arrangement %d is not supported",
7505 gCamCapability[mCameraId]->color_arrangement);
7506 break;
7507 }
7508 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7509 }
7510
7511 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7512 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7513 }
7514
7515 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7516 String8 str((const char *)gps_methods);
7517 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7518 }
7519
7520 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7521 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7522 }
7523
7524 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7525 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7526 }
7527
7528 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7529 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7530 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7531 }
7532
7533 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7534 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7535 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7536 }
7537
7538 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7539 int32_t fwk_thumb_size[2];
7540 fwk_thumb_size[0] = thumb_size->width;
7541 fwk_thumb_size[1] = thumb_size->height;
7542 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7543 }
7544
7545 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7546 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7547 privateData,
7548 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7549 }
7550
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007551 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007552 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007553 meteringMode, 1);
7554 }
7555
Thierry Strudel54dc9782017-02-15 12:12:10 -08007556 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7557 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7558 LOGD("hdr_scene_data: %d %f\n",
7559 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7560 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7561 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7562 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7563 &isHdr, 1);
7564 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7565 &isHdrConfidence, 1);
7566 }
7567
7568
7569
Thierry Strudel3d639192016-09-09 11:52:26 -07007570 if (metadata->is_tuning_params_valid) {
7571 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7572 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7573 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7574
7575
7576 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7577 sizeof(uint32_t));
7578 data += sizeof(uint32_t);
7579
7580 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7581 sizeof(uint32_t));
7582 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7583 data += sizeof(uint32_t);
7584
7585 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7586 sizeof(uint32_t));
7587 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7588 data += sizeof(uint32_t);
7589
7590 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7591 sizeof(uint32_t));
7592 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7593 data += sizeof(uint32_t);
7594
7595 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7596 sizeof(uint32_t));
7597 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7598 data += sizeof(uint32_t);
7599
7600 metadata->tuning_params.tuning_mod3_data_size = 0;
7601 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7602 sizeof(uint32_t));
7603 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7604 data += sizeof(uint32_t);
7605
7606 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7607 TUNING_SENSOR_DATA_MAX);
7608 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7609 count);
7610 data += count;
7611
7612 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7613 TUNING_VFE_DATA_MAX);
7614 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7615 count);
7616 data += count;
7617
7618 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7619 TUNING_CPP_DATA_MAX);
7620 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7621 count);
7622 data += count;
7623
7624 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7625 TUNING_CAC_DATA_MAX);
7626 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7627 count);
7628 data += count;
7629
7630 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7631 (int32_t *)(void *)tuning_meta_data_blob,
7632 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7633 }
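    // Layout of the tuning blob assembled above: six uint32 header fields (data
    // version followed by the sensor, VFE, CPP, CAC and mod3 section sizes), then
    // the sensor, VFE, CPP and CAC payloads copied back to back, each clamped to
    // its TUNING_*_DATA_MAX limit. The blob is exported through
    // QCAMERA3_TUNING_META_DATA_BLOB as an int32 array.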
7634
7635 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7636 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7637 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7638 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7639 NEUTRAL_COL_POINTS);
7640 }
7641
7642 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7643 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7644 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7645 }
7646
7647 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7648 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7649 // Adjust crop region from sensor output coordinate system to active
7650 // array coordinate system.
7651 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7652 hAeRegions->rect.width, hAeRegions->rect.height);
7653
7654 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7655 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7656 REGIONS_TUPLE_COUNT);
7657 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7658 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7659 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7660 hAeRegions->rect.height);
7661 }
7662
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007663 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7664 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7665 if (NAME_NOT_FOUND != val) {
7666 uint8_t fwkAfMode = (uint8_t)val;
7667 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7668 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7669 } else {
7670 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7671 val);
7672 }
7673 }
7674
Thierry Strudel3d639192016-09-09 11:52:26 -07007675 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7676 uint8_t fwk_afState = (uint8_t) *afState;
7677 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007678 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007679 }
7680
7681 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7682 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7683 }
7684
7685 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7686 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7687 }
7688
7689 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7690 uint8_t fwk_lensState = *lensState;
7691 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7692 }
7693
Thierry Strudel3d639192016-09-09 11:52:26 -07007694
7695 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007696 uint32_t ab_mode = *hal_ab_mode;
7697 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7698 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7699 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7700 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007701 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007702 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007703 if (NAME_NOT_FOUND != val) {
7704 uint8_t fwk_ab_mode = (uint8_t)val;
7705 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7706 }
7707 }
7708
7709 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7710 int val = lookupFwkName(SCENE_MODES_MAP,
7711 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7712 if (NAME_NOT_FOUND != val) {
7713 uint8_t fwkBestshotMode = (uint8_t)val;
7714 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7715 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7716 } else {
7717 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7718 }
7719 }
7720
7721 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7722 uint8_t fwk_mode = (uint8_t) *mode;
7723 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7724 }
7725
                                                7726    /* Constant metadata values to be updated */
7727 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7728 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7729
7730 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7731 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7732
7733 int32_t hotPixelMap[2];
7734 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7735
7736 // CDS
7737 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7738 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7739 }
7740
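    // The video HDR, IR and TNR blocks below share the same bookkeeping: the value
    // reported by the HAL is compared against the corresponding bit in
    // mCurrFeatureState, a PROFILE_META_*_TOGGLED message is logged on a
    // transition, the bit is updated, and the current value is published through
    // the matching QCAMERA3 vendor tag.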
Thierry Strudel04e026f2016-10-10 11:27:36 -07007741 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7742 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007743 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007744 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7745 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7746 } else {
7747 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7748 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007749
7750 if(fwk_hdr != curr_hdr_state) {
7751 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7752 if(fwk_hdr)
7753 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7754 else
7755 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7756 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007757 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7758 }
7759
Thierry Strudel54dc9782017-02-15 12:12:10 -08007760 //binning correction
7761 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7762 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7763 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7764 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7765 }
7766
Thierry Strudel04e026f2016-10-10 11:27:36 -07007767 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007768 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007769 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7770 int8_t is_ir_on = 0;
7771
7772 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7773 if(is_ir_on != curr_ir_state) {
7774 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7775 if(is_ir_on)
7776 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7777 else
7778 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7779 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007780 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007781 }
7782
Thierry Strudel269c81a2016-10-12 12:13:59 -07007783 // AEC SPEED
7784 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7785 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7786 }
7787
7788 // AWB SPEED
7789 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7790 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7791 }
7792
Thierry Strudel3d639192016-09-09 11:52:26 -07007793 // TNR
7794 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7795 uint8_t tnr_enable = tnr->denoise_enable;
7796 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007797 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7798 int8_t is_tnr_on = 0;
7799
7800 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7801 if(is_tnr_on != curr_tnr_state) {
7802 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7803 if(is_tnr_on)
7804 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7805 else
7806 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7807 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007808
7809 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7810 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7811 }
7812
7813 // Reprocess crop data
7814 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7815 uint8_t cnt = crop_data->num_of_streams;
7816 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7817 // mm-qcamera-daemon only posts crop_data for streams
                                                7818            // not linked to pproc, so the absence of valid crop metadata is not
                                                7819            // necessarily an error case.
7820 LOGD("No valid crop metadata entries");
7821 } else {
7822 uint32_t reproc_stream_id;
7823 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7824 LOGD("No reprocessible stream found, ignore crop data");
7825 } else {
7826 int rc = NO_ERROR;
7827 Vector<int32_t> roi_map;
7828 int32_t *crop = new int32_t[cnt*4];
7829 if (NULL == crop) {
7830 rc = NO_MEMORY;
7831 }
7832 if (NO_ERROR == rc) {
7833 int32_t streams_found = 0;
7834 for (size_t i = 0; i < cnt; i++) {
7835 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7836 if (pprocDone) {
7837 // HAL already does internal reprocessing,
7838 // either via reprocessing before JPEG encoding,
7839 // or offline postprocessing for pproc bypass case.
7840 crop[0] = 0;
7841 crop[1] = 0;
7842 crop[2] = mInputStreamInfo.dim.width;
7843 crop[3] = mInputStreamInfo.dim.height;
7844 } else {
7845 crop[0] = crop_data->crop_info[i].crop.left;
7846 crop[1] = crop_data->crop_info[i].crop.top;
7847 crop[2] = crop_data->crop_info[i].crop.width;
7848 crop[3] = crop_data->crop_info[i].crop.height;
7849 }
7850 roi_map.add(crop_data->crop_info[i].roi_map.left);
7851 roi_map.add(crop_data->crop_info[i].roi_map.top);
7852 roi_map.add(crop_data->crop_info[i].roi_map.width);
7853 roi_map.add(crop_data->crop_info[i].roi_map.height);
7854 streams_found++;
7855 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7856 crop[0], crop[1], crop[2], crop[3]);
7857 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7858 crop_data->crop_info[i].roi_map.left,
7859 crop_data->crop_info[i].roi_map.top,
7860 crop_data->crop_info[i].roi_map.width,
7861 crop_data->crop_info[i].roi_map.height);
7862 break;
7863
7864 }
7865 }
7866 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7867 &streams_found, 1);
7868 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7869 crop, (size_t)(streams_found * 4));
7870 if (roi_map.array()) {
7871 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7872 roi_map.array(), roi_map.size());
7873 }
7874 }
7875 if (crop) {
7876 delete [] crop;
7877 }
7878 }
7879 }
7880 }
7881
7882 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7883        // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7884        // so hardcode the CAC result to OFF mode.
7885 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7886 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7887 } else {
7888 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7889 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7890 *cacMode);
7891 if (NAME_NOT_FOUND != val) {
7892 uint8_t resultCacMode = (uint8_t)val;
7893                // Check whether the CAC result from the callback equals the framework-set CAC mode.
7894                // If not equal, report the CAC mode that came in the corresponding request.
7895 if (fwk_cacMode != resultCacMode) {
7896 resultCacMode = fwk_cacMode;
7897 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007898 //Check if CAC is disabled by property
7899 if (m_cacModeDisabled) {
7900 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7901 }
7902
Thierry Strudel3d639192016-09-09 11:52:26 -07007903 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7904 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7905 } else {
7906 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7907 }
7908 }
7909 }
7910
7911 // Post blob of cam_cds_data through vendor tag.
7912 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7913 uint8_t cnt = cdsInfo->num_of_streams;
7914 cam_cds_data_t cdsDataOverride;
7915 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7916 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7917 cdsDataOverride.num_of_streams = 1;
7918 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7919 uint32_t reproc_stream_id;
7920 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7921 LOGD("No reprocessible stream found, ignore cds data");
7922 } else {
7923 for (size_t i = 0; i < cnt; i++) {
7924 if (cdsInfo->cds_info[i].stream_id ==
7925 reproc_stream_id) {
7926 cdsDataOverride.cds_info[0].cds_enable =
7927 cdsInfo->cds_info[i].cds_enable;
7928 break;
7929 }
7930 }
7931 }
7932 } else {
7933 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7934 }
7935 camMetadata.update(QCAMERA3_CDS_INFO,
7936 (uint8_t *)&cdsDataOverride,
7937 sizeof(cam_cds_data_t));
7938 }
7939
7940 // Ldaf calibration data
7941 if (!mLdafCalibExist) {
7942 IF_META_AVAILABLE(uint32_t, ldafCalib,
7943 CAM_INTF_META_LDAF_EXIF, metadata) {
7944 mLdafCalibExist = true;
7945 mLdafCalib[0] = ldafCalib[0];
7946 mLdafCalib[1] = ldafCalib[1];
7947 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7948 ldafCalib[0], ldafCalib[1]);
7949 }
7950 }
7951
Thierry Strudel54dc9782017-02-15 12:12:10 -08007952 // EXIF debug data through vendor tag
7953 /*
7954 * Mobicat Mask can assume 3 values:
7955 * 1 refers to Mobicat data,
7956 * 2 refers to Stats Debug and Exif Debug Data
7957 * 3 refers to Mobicat and Stats Debug Data
7958 * We want to make sure that we are sending Exif debug data
7959 * only when Mobicat Mask is 2.
7960 */
7961 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7962 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7963 (uint8_t *)(void *)mExifParams.debug_params,
7964 sizeof(mm_jpeg_debug_exif_params_t));
7965 }
7966
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007967 // Reprocess and DDM debug data through vendor tag
7968 cam_reprocess_info_t repro_info;
7969 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007970 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7971 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007972 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 }
7974 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7975 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007976 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007977 }
7978 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7979 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007980 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007981 }
7982 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7983 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007984 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007985 }
7986 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7987 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007988 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007989 }
7990 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007991 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007992 }
7993 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7994 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007995 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007996 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007997 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7998 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7999 }
8000 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8001 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8002 }
8003 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8004 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008005
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008006 // INSTANT AEC MODE
8007 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8008 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8009 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8010 }
8011
Shuzhen Wange763e802016-03-31 10:24:29 -07008012 // AF scene change
8013 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8014 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8015 }
8016
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008017 // Enable ZSL
8018 if (enableZsl != nullptr) {
8019 uint8_t value = *enableZsl ?
8020 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8021 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8022 }
8023
Thierry Strudel3d639192016-09-09 11:52:26 -07008024 resultMetadata = camMetadata.release();
8025 return resultMetadata;
8026}
8027
8028/*===========================================================================
8029 * FUNCTION : saveExifParams
8030 *
8031 * DESCRIPTION: Saves 3A/stats EXIF debug parameters from the metadata callback
8032 *
8033 * PARAMETERS :
8034 * @metadata : metadata information from callback
8035 *
8036 * RETURN : none
8037 *
8038 *==========================================================================*/
8039void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8040{
8041 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8042 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8043 if (mExifParams.debug_params) {
8044 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8045 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8046 }
8047 }
8048 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8049 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8050 if (mExifParams.debug_params) {
8051 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8052 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8053 }
8054 }
8055 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8056 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8057 if (mExifParams.debug_params) {
8058 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8059 mExifParams.debug_params->af_debug_params_valid = TRUE;
8060 }
8061 }
8062 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8063 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8064 if (mExifParams.debug_params) {
8065 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8066 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8067 }
8068 }
8069 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8070 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8071 if (mExifParams.debug_params) {
8072 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8073 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8074 }
8075 }
8076 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8077 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8078 if (mExifParams.debug_params) {
8079 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8080 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8081 }
8082 }
8083 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8084 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8085 if (mExifParams.debug_params) {
8086 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8087 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8088 }
8089 }
8090 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8091 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8092 if (mExifParams.debug_params) {
8093 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8094 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8095 }
8096 }
8097}
8098
8099/*===========================================================================
8100 * FUNCTION : get3AExifParams
8101 *
8102 * DESCRIPTION: Returns the cached 3A EXIF parameters
8103 *
8104 * PARAMETERS : none
8105 *
8106 *
8107 * RETURN : mm_jpeg_exif_params_t
8108 *
8109 *==========================================================================*/
8110mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8111{
8112 return mExifParams;
8113}
8114
8115/*===========================================================================
8116 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8117 *
8118 * DESCRIPTION:
8119 * DESCRIPTION: Translates urgent (partial) callback metadata into framework result metadata
8120 * PARAMETERS :
8121 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008122 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8123 * urgent metadata in a batch. Always true for
8124 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008125 *
8126 * RETURN : camera_metadata_t*
8127 * metadata in a format specified by fwk
8128 *==========================================================================*/
8129camera_metadata_t*
8130QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008131 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008132{
8133 CameraMetadata camMetadata;
8134 camera_metadata_t *resultMetadata;
8135
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008136 if (!lastUrgentMetadataInBatch) {
8137 /* In batch mode, use empty metadata if this is not the last in batch
8138 */
8139 resultMetadata = allocate_camera_metadata(0, 0);
8140 return resultMetadata;
8141 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008142
8143 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8144 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8145 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8146 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8147 }
8148
8149 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8150 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8151 &aecTrigger->trigger, 1);
8152 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8153 &aecTrigger->trigger_id, 1);
8154 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8155 aecTrigger->trigger);
8156 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8157 aecTrigger->trigger_id);
8158 }
8159
8160 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8161 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8162 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8163 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8164 }
8165
Thierry Strudel3d639192016-09-09 11:52:26 -07008166 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8167 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8168 &af_trigger->trigger, 1);
8169 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8170 af_trigger->trigger);
8171 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8172 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8173 af_trigger->trigger_id);
8174 }
8175
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008176 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8177 /*af regions*/
8178 int32_t afRegions[REGIONS_TUPLE_COUNT];
8179        // Adjust the AF region from the sensor output coordinate system to the
8180        // active array coordinate system.
8181 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8182 hAfRegions->rect.width, hAfRegions->rect.height);
8183
8184 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8185 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8186 REGIONS_TUPLE_COUNT);
8187 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8188 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8189 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8190 hAfRegions->rect.height);
8191 }
8192
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008193 // AF region confidence
8194 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8195 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8196 }
8197
Thierry Strudel3d639192016-09-09 11:52:26 -07008198 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8199 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8200 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8201 if (NAME_NOT_FOUND != val) {
8202 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8203 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8204 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8205 } else {
8206 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8207 }
8208 }
8209
8210 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8211 uint32_t aeMode = CAM_AE_MODE_MAX;
8212 int32_t flashMode = CAM_FLASH_MODE_MAX;
8213 int32_t redeye = -1;
8214 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8215 aeMode = *pAeMode;
8216 }
8217 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8218 flashMode = *pFlashMode;
8219 }
8220 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8221 redeye = *pRedeye;
8222 }
8223
8224 if (1 == redeye) {
8225 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8226 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8227 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8228 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8229 flashMode);
8230 if (NAME_NOT_FOUND != val) {
8231 fwk_aeMode = (uint8_t)val;
8232 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8233 } else {
8234 LOGE("Unsupported flash mode %d", flashMode);
8235 }
8236 } else if (aeMode == CAM_AE_MODE_ON) {
8237 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8238 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8239 } else if (aeMode == CAM_AE_MODE_OFF) {
8240 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8241 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008242 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8243 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8244 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008245 } else {
8246 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8247 "flashMode:%d, aeMode:%u!!!",
8248 redeye, flashMode, aeMode);
8249 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008250 if (mInstantAEC) {
8251        // Increment the frame index count until the bound is reached for instant AEC.
8252 mInstantAecFrameIdxCount++;
8253 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8254 CAM_INTF_META_AEC_INFO, metadata) {
8255 LOGH("ae_params->settled = %d",ae_params->settled);
8256 // If AEC settled, or if number of frames reached bound value,
8257 // should reset instant AEC.
8258 if (ae_params->settled ||
8259 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8260 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8261 mInstantAEC = false;
8262 mResetInstantAEC = true;
8263 mInstantAecFrameIdxCount = 0;
8264 }
8265 }
8266 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008267 resultMetadata = camMetadata.release();
8268 return resultMetadata;
8269}
8270
8271/*===========================================================================
8272 * FUNCTION : dumpMetadataToFile
8273 *
8274 * DESCRIPTION: Dumps tuning metadata to file system
8275 *
8276 * PARAMETERS :
8277 * @meta : tuning metadata
8278 * @dumpFrameCount : current dump frame count
8279 * @enabled : Enable mask
8280 *   @type : dump type string used in the output file name
 *   @frameNumber : frame number appended to the output file name
 *
8281 *==========================================================================*/
8282void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8283 uint32_t &dumpFrameCount,
8284 bool enabled,
8285 const char *type,
8286 uint32_t frameNumber)
8287{
8288 //Some sanity checks
8289 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8290 LOGE("Tuning sensor data size bigger than expected %d: %d",
8291 meta.tuning_sensor_data_size,
8292 TUNING_SENSOR_DATA_MAX);
8293 return;
8294 }
8295
8296 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8297 LOGE("Tuning VFE data size bigger than expected %d: %d",
8298 meta.tuning_vfe_data_size,
8299 TUNING_VFE_DATA_MAX);
8300 return;
8301 }
8302
8303 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8304 LOGE("Tuning CPP data size bigger than expected %d: %d",
8305 meta.tuning_cpp_data_size,
8306 TUNING_CPP_DATA_MAX);
8307 return;
8308 }
8309
8310 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8311 LOGE("Tuning CAC data size bigger than expected %d: %d",
8312 meta.tuning_cac_data_size,
8313 TUNING_CAC_DATA_MAX);
8314 return;
8315 }
8316 //
8317
8318 if(enabled){
8319 char timeBuf[FILENAME_MAX];
8320 char buf[FILENAME_MAX];
8321 memset(buf, 0, sizeof(buf));
8322 memset(timeBuf, 0, sizeof(timeBuf));
8323 time_t current_time;
8324 struct tm * timeinfo;
8325 time (&current_time);
8326 timeinfo = localtime (&current_time);
8327 if (timeinfo != NULL) {
8328 strftime (timeBuf, sizeof(timeBuf),
8329 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8330 }
8331 String8 filePath(timeBuf);
8332 snprintf(buf,
8333 sizeof(buf),
8334 "%dm_%s_%d.bin",
8335 dumpFrameCount,
8336 type,
8337 frameNumber);
8338 filePath.append(buf);
8339 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8340 if (file_fd >= 0) {
8341 ssize_t written_len = 0;
8342 meta.tuning_data_version = TUNING_DATA_VERSION;
8343 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8344 written_len += write(file_fd, data, sizeof(uint32_t));
8345 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8346 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8347 written_len += write(file_fd, data, sizeof(uint32_t));
8348 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8349 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8350 written_len += write(file_fd, data, sizeof(uint32_t));
8351 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8352 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8353 written_len += write(file_fd, data, sizeof(uint32_t));
8354 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8355 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8356 written_len += write(file_fd, data, sizeof(uint32_t));
8357 meta.tuning_mod3_data_size = 0;
8358 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8359 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8360 written_len += write(file_fd, data, sizeof(uint32_t));
8361 size_t total_size = meta.tuning_sensor_data_size;
8362 data = (void *)((uint8_t *)&meta.data);
8363 written_len += write(file_fd, data, total_size);
8364 total_size = meta.tuning_vfe_data_size;
8365 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8366 written_len += write(file_fd, data, total_size);
8367 total_size = meta.tuning_cpp_data_size;
8368 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8369 written_len += write(file_fd, data, total_size);
8370 total_size = meta.tuning_cac_data_size;
8371 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8372 written_len += write(file_fd, data, total_size);
8373 close(file_fd);
8374 }else {
8375 LOGE("fail to open file for metadata dumping");
8376 }
8377 }
8378}
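/*
 * Dump file layout produced above (the file name fields below are illustrative):
 *   <QCAMERA_DUMP_FRM_LOCATION>YYYYmmddHHMMSS<dumpFrameCount>m_<type>_<frameNumber>.bin
 * containing six uint32 headers (tuning_data_version, sensor, VFE, CPP and CAC
 * data sizes, and the mod3 size which is always written as 0), followed by the
 * sensor payload at the start of meta.data and the VFE, CPP and CAC payloads at
 * their TUNING_*_DATA_OFFSET offsets.
 */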
8379
8380/*===========================================================================
8381 * FUNCTION : cleanAndSortStreamInfo
8382 *
8383 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8384 *              and sort them such that the raw stream is at the end of the list.
8385 *              This is a workaround for a camera daemon constraint.
8386 *
8387 * PARAMETERS : None
8388 *
8389 *==========================================================================*/
8390void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8391{
8392 List<stream_info_t *> newStreamInfo;
8393
8394 /*clean up invalid streams*/
8395 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8396 it != mStreamInfo.end();) {
8397 if(((*it)->status) == INVALID){
8398 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8399 delete channel;
8400 free(*it);
8401 it = mStreamInfo.erase(it);
8402 } else {
8403 it++;
8404 }
8405 }
8406
8407 // Move preview/video/callback/snapshot streams into newList
8408 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8409 it != mStreamInfo.end();) {
8410 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8411 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8412 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8413 newStreamInfo.push_back(*it);
8414 it = mStreamInfo.erase(it);
8415 } else
8416 it++;
8417 }
8418 // Move raw streams into newList
8419 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8420 it != mStreamInfo.end();) {
8421 newStreamInfo.push_back(*it);
8422 it = mStreamInfo.erase(it);
8423 }
8424
8425 mStreamInfo = newStreamInfo;
8426}
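/*
 * Ordering example (hypothetical stream set, not from a real configuration):
 * an input list of [RAW16, preview YUV, JPEG snapshot] is rewritten as
 * [preview YUV, JPEG snapshot, RAW16]; non-RAW streams keep their relative
 * order and RAW streams are moved to the tail.
 */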
8427
8428/*===========================================================================
8429 * FUNCTION : extractJpegMetadata
8430 *
8431 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8432 *              JPEG metadata is cached in the HAL and returned as part of the capture
8433 *              result when metadata is returned from the camera daemon.
8434 *
8435 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8436 * @request: capture request
8437 *
8438 *==========================================================================*/
8439void QCamera3HardwareInterface::extractJpegMetadata(
8440 CameraMetadata& jpegMetadata,
8441 const camera3_capture_request_t *request)
8442{
8443 CameraMetadata frame_settings;
8444 frame_settings = request->settings;
8445
8446 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8447 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8448 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8449 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8450
8451 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8452 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8453 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8454 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8455
8456 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8457 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8458 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8459 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8460
8461 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8462 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8463 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8464 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8465
8466 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8467 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8468 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8469 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8470
8471 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8472 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8473 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8474 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8475
8476 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8477 int32_t thumbnail_size[2];
8478 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8479 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8480 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8481 int32_t orientation =
8482 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008483 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008484 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8485 int32_t temp;
8486 temp = thumbnail_size[0];
8487 thumbnail_size[0] = thumbnail_size[1];
8488 thumbnail_size[1] = temp;
8489 }
8490 }
8491 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8492 thumbnail_size,
8493 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8494 }
8495
8496}
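/*
 * Thumbnail example (hypothetical request values): with
 * ANDROID_JPEG_THUMBNAIL_SIZE = 320x240 and ANDROID_JPEG_ORIENTATION = 90, the
 * cached thumbnail size becomes 240x320 when needJpegExifRotation() is false
 * (the rotation is applied to the image itself), and stays 320x240 when the
 * rotation is expressed via the EXIF orientation tag instead.
 */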
8497
8498/*===========================================================================
8499 * FUNCTION : convertToRegions
8500 *
8501 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8502 *
8503 * PARAMETERS :
8504 * @rect : cam_rect_t struct to convert
8505 * @region : int32_t destination array
8506 * @weight : if we are converting from cam_area_t, weight is valid
8507 * else weight = -1
8508 *
8509 *==========================================================================*/
8510void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8511 int32_t *region, int weight)
8512{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008513 region[FACE_LEFT] = rect.left;
8514 region[FACE_TOP] = rect.top;
8515 region[FACE_RIGHT] = rect.left + rect.width;
8516 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008517 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008518 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008519 }
8520}
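/*
 * Worked example (hypothetical values): a rect of {left=100, top=200, width=50,
 * height=60} with weight 1 is reported to the framework as
 * [left, top, right, bottom, weight] = [100, 200, 150, 260, 1].
 */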
8521
8522/*===========================================================================
8523 * FUNCTION : convertFromRegions
8524 *
8525 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8526 *
8527 * PARAMETERS :
8528 *   @roi            : cam_area_t destination to populate
8529 *   @frame_settings : capture request settings containing the region tag
8530 *   @tag            : metadata tag whose data is laid out as
8531 *                     [x_min, y_min, x_max, y_max, weight]
8532 *
8533 *==========================================================================*/
8534void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008535 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008536{
Thierry Strudel3d639192016-09-09 11:52:26 -07008537 int32_t x_min = frame_settings.find(tag).data.i32[0];
8538 int32_t y_min = frame_settings.find(tag).data.i32[1];
8539 int32_t x_max = frame_settings.find(tag).data.i32[2];
8540 int32_t y_max = frame_settings.find(tag).data.i32[3];
8541 roi.weight = frame_settings.find(tag).data.i32[4];
8542 roi.rect.left = x_min;
8543 roi.rect.top = y_min;
8544 roi.rect.width = x_max - x_min;
8545 roi.rect.height = y_max - y_min;
8546}
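/*
 * Worked example (hypothetical values): a framework region of
 * [x_min, y_min, x_max, y_max, weight] = [100, 200, 150, 260, 1] yields
 * roi.rect = {left=100, top=200, width=50, height=60} and roi.weight = 1,
 * i.e. the inverse mapping of convertToRegions().
 */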
8547
8548/*===========================================================================
8549 * FUNCTION : resetIfNeededROI
8550 *
8551 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8552 * crop region
8553 *
8554 * PARAMETERS :
8555 * @roi : cam_area_t struct to resize
8556 * @scalerCropRegion : cam_crop_region_t region to compare against
8557 *
8558 *
8559 *==========================================================================*/
8560bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8561 const cam_crop_region_t* scalerCropRegion)
8562{
8563 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8564 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8565 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8566 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8567
8568    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8569     * Without this check, the calculations below that validate whether the ROI
8570     * is inside the scaler crop region would fail, leaving the ROI unreset and
8571     * causing the algorithm to keep using a stale ROI window.
8572 */
8573 if (roi->weight == 0) {
8574 return true;
8575 }
8576
8577 if ((roi_x_max < scalerCropRegion->left) ||
8578 // right edge of roi window is left of scalar crop's left edge
8579 (roi_y_max < scalerCropRegion->top) ||
8580 // bottom edge of roi window is above scalar crop's top edge
8581 (roi->rect.left > crop_x_max) ||
8582            // left edge of roi window is to the right of scaler crop's right edge
8583 (roi->rect.top > crop_y_max)){
8584            // top edge of roi window is below scaler crop's bottom edge
8585 return false;
8586 }
8587 if (roi->rect.left < scalerCropRegion->left) {
8588 roi->rect.left = scalerCropRegion->left;
8589 }
8590 if (roi->rect.top < scalerCropRegion->top) {
8591 roi->rect.top = scalerCropRegion->top;
8592 }
8593 if (roi_x_max > crop_x_max) {
8594 roi_x_max = crop_x_max;
8595 }
8596 if (roi_y_max > crop_y_max) {
8597 roi_y_max = crop_y_max;
8598 }
8599 roi->rect.width = roi_x_max - roi->rect.left;
8600 roi->rect.height = roi_y_max - roi->rect.top;
8601 return true;
8602}
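/*
 * Clipping example (hypothetical values): with a scaler crop of
 * {left=0, top=0, width=2000, height=1500}, an ROI of
 * {left=1900, top=100, width=300, height=200} still overlaps the crop, so it is
 * clipped to {left=1900, top=100, width=100, height=200}; an ROI with non-zero
 * weight that lies entirely outside the crop makes this function return false.
 */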
8603
8604/*===========================================================================
8605 * FUNCTION : convertLandmarks
8606 *
8607 * DESCRIPTION: helper method to extract the landmarks from face detection info
8608 *
8609 * PARAMETERS :
8610 * @landmark_data : input landmark data to be converted
8611 * @landmarks : int32_t destination array
8612 *
8613 *
8614 *==========================================================================*/
8615void QCamera3HardwareInterface::convertLandmarks(
8616 cam_face_landmarks_info_t landmark_data,
8617 int32_t *landmarks)
8618{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008619 if (landmark_data.is_left_eye_valid) {
8620 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8621 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8622 } else {
8623 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8624 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8625 }
8626
8627 if (landmark_data.is_right_eye_valid) {
8628 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8629 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8630 } else {
8631 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8632 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8633 }
8634
8635 if (landmark_data.is_mouth_valid) {
8636 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8637 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8638 } else {
8639 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8640 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8641 }
8642}
8643
8644/*===========================================================================
8645 * FUNCTION : setInvalidLandmarks
8646 *
8647 * DESCRIPTION: helper method to set invalid landmarks
8648 *
8649 * PARAMETERS :
8650 * @landmarks : int32_t destination array
8651 *
8652 *
8653 *==========================================================================*/
8654void QCamera3HardwareInterface::setInvalidLandmarks(
8655 int32_t *landmarks)
8656{
8657 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8658 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8659 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8660 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8661 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8662 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008663}
8664
8665#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008666
8667/*===========================================================================
8668 * FUNCTION : getCapabilities
8669 *
8670 * DESCRIPTION: query camera capability from back-end
8671 *
8672 * PARAMETERS :
8673 * @ops : mm-interface ops structure
8674 * @cam_handle : camera handle for which we need capability
8675 *
8676 * RETURN : ptr type of capability structure
8677 * capability for success
8678 * NULL for failure
8679 *==========================================================================*/
8680cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8681 uint32_t cam_handle)
8682{
8683 int rc = NO_ERROR;
8684 QCamera3HeapMemory *capabilityHeap = NULL;
8685 cam_capability_t *cap_ptr = NULL;
8686
8687 if (ops == NULL) {
8688 LOGE("Invalid arguments");
8689 return NULL;
8690 }
8691
8692 capabilityHeap = new QCamera3HeapMemory(1);
8693 if (capabilityHeap == NULL) {
8694 LOGE("creation of capabilityHeap failed");
8695 return NULL;
8696 }
8697
8698 /* Allocate memory for capability buffer */
8699 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8700 if(rc != OK) {
8701        LOGE("No memory for capability");
8702 goto allocate_failed;
8703 }
8704
8705 /* Map memory for capability buffer */
8706 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8707
8708 rc = ops->map_buf(cam_handle,
8709 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8710 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8711 if(rc < 0) {
8712 LOGE("failed to map capability buffer");
8713 rc = FAILED_TRANSACTION;
8714 goto map_failed;
8715 }
8716
8717 /* Query Capability */
8718 rc = ops->query_capability(cam_handle);
8719 if(rc < 0) {
8720 LOGE("failed to query capability");
8721 rc = FAILED_TRANSACTION;
8722 goto query_failed;
8723 }
8724
8725 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8726 if (cap_ptr == NULL) {
8727 LOGE("out of memory");
8728 rc = NO_MEMORY;
8729 goto query_failed;
8730 }
8731
8732 memset(cap_ptr, 0, sizeof(cam_capability_t));
8733 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8734
8735 int index;
8736 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8737 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8738 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8739 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8740 }
8741
8742query_failed:
8743 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8744map_failed:
8745 capabilityHeap->deallocate();
8746allocate_failed:
8747 delete capabilityHeap;
8748
8749 if (rc != NO_ERROR) {
8750 return NULL;
8751 } else {
8752 return cap_ptr;
8753 }
8754}
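/*
 * Ownership note: the returned cam_capability_t is allocated with malloc() and
 * is owned by the caller; initCapabilities() below stores it in gCamCapability[]
 * (and in aux_cam_cap/main_cam_cap for dual camera setups).
 */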
8755
Thierry Strudel3d639192016-09-09 11:52:26 -07008756/*===========================================================================
8757 * FUNCTION : initCapabilities
8758 *
8759 * DESCRIPTION: initialize camera capabilities in static data struct
8760 *
8761 * PARAMETERS :
8762 * @cameraId : camera Id
8763 *
8764 * RETURN : int32_t type of status
8765 * NO_ERROR -- success
8766 * none-zero failure code
8767 *==========================================================================*/
8768int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8769{
8770 int rc = 0;
8771 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008772 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008773
8774 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8775 if (rc) {
8776 LOGE("camera_open failed. rc = %d", rc);
8777 goto open_failed;
8778 }
8779 if (!cameraHandle) {
8780 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8781 goto open_failed;
8782 }
8783
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008784 handle = get_main_camera_handle(cameraHandle->camera_handle);
8785 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8786 if (gCamCapability[cameraId] == NULL) {
8787 rc = FAILED_TRANSACTION;
8788 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008789 }
8790
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008791 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008792 if (is_dual_camera_by_idx(cameraId)) {
8793 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8794 gCamCapability[cameraId]->aux_cam_cap =
8795 getCapabilities(cameraHandle->ops, handle);
8796 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8797 rc = FAILED_TRANSACTION;
8798 free(gCamCapability[cameraId]);
8799 goto failed_op;
8800 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008801
8802 // Copy the main camera capability to main_cam_cap struct
8803 gCamCapability[cameraId]->main_cam_cap =
8804 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8805 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8806 LOGE("out of memory");
8807 rc = NO_MEMORY;
8808 goto failed_op;
8809 }
8810 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8811 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008812 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008813failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008814 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8815 cameraHandle = NULL;
8816open_failed:
8817 return rc;
8818}
8819
8820/*==========================================================================
8821 * FUNCTION   : get3AVersion
8822 *
8823 * DESCRIPTION: get the Q3A S/W version
8824 *
8825 * PARAMETERS :
8826 * @sw_version: Reference of Q3A structure which will hold version info upon
8827 * return
8828 *
8829 * RETURN : None
8830 *
8831 *==========================================================================*/
8832void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8833{
8834 if(gCamCapability[mCameraId])
8835 sw_version = gCamCapability[mCameraId]->q3a_version;
8836 else
8837 LOGE("Capability structure NULL!");
8838}
8839
8840
8841/*===========================================================================
8842 * FUNCTION : initParameters
8843 *
8844 * DESCRIPTION: initialize camera parameters
8845 *
8846 * PARAMETERS :
8847 *
8848 * RETURN : int32_t type of status
8849 * NO_ERROR -- success
8850 * none-zero failure code
8851 *==========================================================================*/
8852int QCamera3HardwareInterface::initParameters()
8853{
8854 int rc = 0;
8855
8856 //Allocate Set Param Buffer
8857 mParamHeap = new QCamera3HeapMemory(1);
8858 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8859 if(rc != OK) {
8860 rc = NO_MEMORY;
8861 LOGE("Failed to allocate SETPARM Heap memory");
8862 delete mParamHeap;
8863 mParamHeap = NULL;
8864 return rc;
8865 }
8866
8867 //Map memory for parameters buffer
8868 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8869 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8870 mParamHeap->getFd(0),
8871 sizeof(metadata_buffer_t),
8872 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8873 if(rc < 0) {
8874 LOGE("failed to map SETPARM buffer");
8875 rc = FAILED_TRANSACTION;
8876 mParamHeap->deallocate();
8877 delete mParamHeap;
8878 mParamHeap = NULL;
8879 return rc;
8880 }
8881
8882 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8883
8884 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8885 return rc;
8886}
8887
8888/*===========================================================================
8889 * FUNCTION : deinitParameters
8890 *
8891 * DESCRIPTION: de-initialize camera parameters
8892 *
8893 * PARAMETERS :
8894 *
8895 * RETURN : NONE
8896 *==========================================================================*/
8897void QCamera3HardwareInterface::deinitParameters()
8898{
8899 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8900 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8901
8902 mParamHeap->deallocate();
8903 delete mParamHeap;
8904 mParamHeap = NULL;
8905
8906 mParameters = NULL;
8907
8908 free(mPrevParameters);
8909 mPrevParameters = NULL;
8910}
8911
8912/*===========================================================================
8913 * FUNCTION : calcMaxJpegSize
8914 *
8915 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8916 *
8917 * PARAMETERS :
8918 *
8919 * RETURN : max_jpeg_size
8920 *==========================================================================*/
8921size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8922{
8923 size_t max_jpeg_size = 0;
8924 size_t temp_width, temp_height;
8925 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8926 MAX_SIZES_CNT);
8927 for (size_t i = 0; i < count; i++) {
8928 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8929 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8930 if (temp_width * temp_height > max_jpeg_size ) {
8931 max_jpeg_size = temp_width * temp_height;
8932 }
8933 }
8934 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8935 return max_jpeg_size;
8936}
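/*
 * Sizing example (hypothetical picture size table): for a largest picture size
 * of 4000x3000, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
 * i.e. 18,000,000 bytes plus the blob header; the worst-case YUV 4:2:0 footprint
 * is used as the JPEG buffer bound.
 */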
8937
8938/*===========================================================================
8939 * FUNCTION : getMaxRawSize
8940 *
8941 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8942 *
8943 * PARAMETERS :
8944 *
8945 * RETURN : Largest supported Raw Dimension
8946 *==========================================================================*/
8947cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8948{
8949 int max_width = 0;
8950 cam_dimension_t maxRawSize;
8951
8952 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8953 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8954 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8955 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8956 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8957 }
8958 }
8959 return maxRawSize;
8960}
8961
8962
8963/*===========================================================================
8964 * FUNCTION : calcMaxJpegDim
8965 *
8966 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8967 *
8968 * PARAMETERS :
8969 *
8970 * RETURN : max_jpeg_dim
8971 *==========================================================================*/
8972cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8973{
8974 cam_dimension_t max_jpeg_dim;
8975 cam_dimension_t curr_jpeg_dim;
8976 max_jpeg_dim.width = 0;
8977 max_jpeg_dim.height = 0;
8978 curr_jpeg_dim.width = 0;
8979 curr_jpeg_dim.height = 0;
8980 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8981 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8982 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8983 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8984 max_jpeg_dim.width * max_jpeg_dim.height ) {
8985 max_jpeg_dim.width = curr_jpeg_dim.width;
8986 max_jpeg_dim.height = curr_jpeg_dim.height;
8987 }
8988 }
8989 return max_jpeg_dim;
8990}
8991
8992/*===========================================================================
8993 * FUNCTION : addStreamConfig
8994 *
8995 * DESCRIPTION: adds the stream configuration to the array
8996 *
8997 * PARAMETERS :
8998 * @available_stream_configs : pointer to stream configuration array
8999 * @scalar_format : scalar format
9000 * @dim : configuration dimension
9001 * @config_type : input or output configuration type
9002 *
9003 * RETURN : NONE
9004 *==========================================================================*/
9005void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9006 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9007{
9008 available_stream_configs.add(scalar_format);
9009 available_stream_configs.add(dim.width);
9010 available_stream_configs.add(dim.height);
9011 available_stream_configs.add(config_type);
9012}
9013
9014/*===========================================================================
9015 * FUNCTION   : supportBurstCapture
9016 *
9017 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9018 *
9019 * PARAMETERS :
9020 * @cameraId : camera Id
9021 *
9022 * RETURN : true if camera supports BURST_CAPTURE
9023 * false otherwise
9024 *==========================================================================*/
9025bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9026{
9027 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9028 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9029 const int32_t highResWidth = 3264;
9030 const int32_t highResHeight = 2448;
9031
9032 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9033 // Maximum resolution images cannot be captured at >= 10fps
9034 // -> not supporting BURST_CAPTURE
9035 return false;
9036 }
9037
9038 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9039 // Maximum resolution images can be captured at >= 20fps
9040 // --> supporting BURST_CAPTURE
9041 return true;
9042 }
9043
9044 // Find the smallest highRes resolution, or largest resolution if there is none
9045 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9046 MAX_SIZES_CNT);
9047 size_t highRes = 0;
9048 while ((highRes + 1 < totalCnt) &&
9049 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9050 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9051 highResWidth * highResHeight)) {
9052 highRes++;
9053 }
9054 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9055 return true;
9056 } else {
9057 return false;
9058 }
9059}
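/*
 * Decision sketch (hypothetical sensor timings, for illustration only): a sensor
 * whose full-resolution minimum frame duration is 66,666,666 ns (~15 fps) passes
 * the 10 fps bound but misses the 20 fps bound, so BURST_CAPTURE support then
 * depends on whether the smallest size at or above 3264x2448 can be captured
 * within the 50 ms (20 fps) bound.
 */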
9060
9061/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009062 * FUNCTION : getPDStatIndex
9063 *
9064 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9065 *
9066 * PARAMETERS :
9067 * @caps : camera capabilities
9068 *
9069 * RETURN : int32_t type
9070 * non-negative - on success
9071 * -1 - on failure
9072 *==========================================================================*/
9073int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9074 if (nullptr == caps) {
9075 return -1;
9076 }
9077
9078 uint32_t metaRawCount = caps->meta_raw_channel_count;
9079 int32_t ret = -1;
9080 for (size_t i = 0; i < metaRawCount; i++) {
9081 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9082 ret = i;
9083 break;
9084 }
9085 }
9086
9087 return ret;
9088}
9089
9090/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009091 * FUNCTION : initStaticMetadata
9092 *
9093 * DESCRIPTION: initialize the static metadata
9094 *
9095 * PARAMETERS :
9096 * @cameraId : camera Id
9097 *
9098 * RETURN : int32_t type of status
9099 * 0 -- success
9100 * non-zero failure code
9101 *==========================================================================*/
9102int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9103{
9104 int rc = 0;
9105 CameraMetadata staticInfo;
9106 size_t count = 0;
9107 bool limitedDevice = false;
9108 char prop[PROPERTY_VALUE_MAX];
9109 bool supportBurst = false;
9110
9111 supportBurst = supportBurstCapture(cameraId);
9112
9113 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9114     * guaranteed, or if the min fps of the max resolution is less than 20 fps, it is
9115     * advertised as a limited device */
9116 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9117 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9118 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9119 !supportBurst;
9120
9121 uint8_t supportedHwLvl = limitedDevice ?
9122 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009123#ifndef USE_HAL_3_3
9124 // LEVEL_3 - This device will support level 3.
9125 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9126#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009127 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009128#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009129
9130 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9131 &supportedHwLvl, 1);
9132
9133 bool facingBack = false;
9134 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9135 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9136 facingBack = true;
9137 }
9138 /*HAL 3 only*/
9139 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9140 &gCamCapability[cameraId]->min_focus_distance, 1);
9141
9142 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9143 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9144
9145 /*should be using focal lengths but sensor doesn't provide that info now*/
9146 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9147 &gCamCapability[cameraId]->focal_length,
9148 1);
9149
9150 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9151 gCamCapability[cameraId]->apertures,
9152 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9153
9154 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9155 gCamCapability[cameraId]->filter_densities,
9156 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9157
9158
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009159 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9160 size_t mode_count =
9161 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9162 for (size_t i = 0; i < mode_count; i++) {
9163 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9164 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009165 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009166 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009167
9168 int32_t lens_shading_map_size[] = {
9169 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9170 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9171 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9172 lens_shading_map_size,
9173 sizeof(lens_shading_map_size)/sizeof(int32_t));
9174
9175 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9176 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9177
9178 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9179 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9180
9181 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9182 &gCamCapability[cameraId]->max_frame_duration, 1);
9183
9184 camera_metadata_rational baseGainFactor = {
9185 gCamCapability[cameraId]->base_gain_factor.numerator,
9186 gCamCapability[cameraId]->base_gain_factor.denominator};
9187 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9188 &baseGainFactor, 1);
9189
9190 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9191 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9192
9193 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9194 gCamCapability[cameraId]->pixel_array_size.height};
9195 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9196 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9197
9198 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9199 gCamCapability[cameraId]->active_array_size.top,
9200 gCamCapability[cameraId]->active_array_size.width,
9201 gCamCapability[cameraId]->active_array_size.height};
9202 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9203 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9204
9205 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9206 &gCamCapability[cameraId]->white_level, 1);
9207
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009208 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9209 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9210 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009211 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009212 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009213
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009214#ifndef USE_HAL_3_3
9215 bool hasBlackRegions = false;
9216 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9217 LOGW("black_region_count: %d is bounded to %d",
9218 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9219 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9220 }
9221 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9222 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9223 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9224 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9225 }
9226 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9227 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9228 hasBlackRegions = true;
9229 }
9230#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9232 &gCamCapability[cameraId]->flash_charge_duration, 1);
9233
9234 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9235 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9236
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009237 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9238 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9239 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009240 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9241 &timestampSource, 1);
9242
Thierry Strudel54dc9782017-02-15 12:12:10 -08009243 //update histogram vendor data
9244 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009245 &gCamCapability[cameraId]->histogram_size, 1);
9246
Thierry Strudel54dc9782017-02-15 12:12:10 -08009247 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009248 &gCamCapability[cameraId]->max_histogram_count, 1);
9249
Shuzhen Wang14415f52016-11-16 18:26:18 -08009250 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9251 //so that the app can request fewer bins than the maximum supported.
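    //For example, if max_histogram_count were 256 and MIN_CAM_HISTOGRAM_STATS_SIZE were 64
    //(illustrative values), the list advertised below would be {256, 128, 64}.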
9252 std::vector<int32_t> histBins;
9253 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9254 histBins.push_back(maxHistBins);
9255 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9256 (maxHistBins & 0x1) == 0) {
9257 histBins.push_back(maxHistBins >> 1);
9258 maxHistBins >>= 1;
9259 }
9260 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9261 histBins.data(), histBins.size());
9262
Thierry Strudel3d639192016-09-09 11:52:26 -07009263 int32_t sharpness_map_size[] = {
9264 gCamCapability[cameraId]->sharpness_map_size.width,
9265 gCamCapability[cameraId]->sharpness_map_size.height};
9266
9267 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9268 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9269
9270 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9271 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9272
Emilian Peev0f3c3162017-03-15 12:57:46 +00009273 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9274 if (0 <= indexPD) {
9275 // Advertise PD stats data as part of the Depth capabilities
9276 int32_t depthWidth =
9277 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9278 int32_t depthHeight =
9279 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9280 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9281 assert(0 < depthSamplesCount);
9282 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9283 &depthSamplesCount, 1);
9284
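    // The depth entries below follow the standard stream configuration layout:
    // (format, width, height, direction) for the configurations and
    // (format, width, height, duration) for the min-frame and stall duration tables.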
9285 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9286 depthHeight,
9287 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9288 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9289 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9290 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9291 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9292
9293 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9294 depthHeight, 33333333,
9295 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9296 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9297 depthMinDuration,
9298 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9299
9300 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9301 depthHeight, 0,
9302 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9303 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9304 depthStallDuration,
9305 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9306
9307 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9308 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9309 }
9310
Thierry Strudel3d639192016-09-09 11:52:26 -07009311 int32_t scalar_formats[] = {
9312 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9313 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9314 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9315 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9316 HAL_PIXEL_FORMAT_RAW10,
9317 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009318 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9319 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9320 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009321
9322 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9323 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9324 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9325 count, MAX_SIZES_CNT, available_processed_sizes);
9326 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9327 available_processed_sizes, count * 2);
9328
9329 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9330 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9331 makeTable(gCamCapability[cameraId]->raw_dim,
9332 count, MAX_SIZES_CNT, available_raw_sizes);
9333 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9334 available_raw_sizes, count * 2);
9335
9336 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9337 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9338 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9339 count, MAX_SIZES_CNT, available_fps_ranges);
9340 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9341 available_fps_ranges, count * 2);
9342
9343 camera_metadata_rational exposureCompensationStep = {
9344 gCamCapability[cameraId]->exp_compensation_step.numerator,
9345 gCamCapability[cameraId]->exp_compensation_step.denominator};
9346 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9347 &exposureCompensationStep, 1);
9348
9349 Vector<uint8_t> availableVstabModes;
9350 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
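    //VIDEO_STABILIZATION_MODE_ON is only added below when the sensor supports EIS 2.0/3.0,
    //the camera is back-facing, and persist.camera.eis.enable is non-zero (default "1").
    //e.g. "adb shell setprop persist.camera.eis.enable 0" hides it (illustrative usage).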
9351 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009352 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009353 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009354 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009355 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009356 count = IS_TYPE_MAX;
9357 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9358 for (size_t i = 0; i < count; i++) {
9359 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9360 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9361 eisSupported = true;
9362 break;
9363 }
9364 }
9365 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009366 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9367 }
9368 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9369 availableVstabModes.array(), availableVstabModes.size());
9370
9371 /*HAL 1 and HAL 3 common*/
9372 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9373 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9374 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009375 // Cap the max zoom to the max preferred value
    // use float division so fractional zoom ratios are not truncated
9376 float maxZoom = MIN((float)maxZoomStep / minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009377 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9378 &maxZoom, 1);
9379
9380 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9381 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9382
9383 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9384 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9385 max3aRegions[2] = 0; /* AF not supported */
9386 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9387 max3aRegions, 3);
9388
9389 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
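    /* e.g. "adb shell setprop persist.camera.facedetect 3" advertises OFF, SIMPLE and FULL
       (illustrative usage; the default below is "1") */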
9390 memset(prop, 0, sizeof(prop));
9391 property_get("persist.camera.facedetect", prop, "1");
9392 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9393 LOGD("Support face detection mode: %d",
9394 supportedFaceDetectMode);
9395
9396 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
9397 /* supported mode should be OFF if the max number of faces is 0 */
9398 if (maxFaces <= 0) {
9399 supportedFaceDetectMode = 0;
9400 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009401 Vector<uint8_t> availableFaceDetectModes;
9402 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9403 if (supportedFaceDetectMode == 1) {
9404 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9405 } else if (supportedFaceDetectMode == 2) {
9406 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9407 } else if (supportedFaceDetectMode == 3) {
9408 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9409 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9410 } else {
9411 maxFaces = 0;
9412 }
9413 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9414 availableFaceDetectModes.array(),
9415 availableFaceDetectModes.size());
9416 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9417 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009418 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9419 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9420 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009421
9422 int32_t exposureCompensationRange[] = {
9423 gCamCapability[cameraId]->exposure_compensation_min,
9424 gCamCapability[cameraId]->exposure_compensation_max};
9425 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9426 exposureCompensationRange,
9427 sizeof(exposureCompensationRange)/sizeof(int32_t));
9428
9429 uint8_t lensFacing = (facingBack) ?
9430 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9431 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9432
9433 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9434 available_thumbnail_sizes,
9435 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9436
9437 /* all supported picture sizes will be combined into this tag */
9438 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9439 /*android.scaler.availableStreamConfigurations*/
9440 Vector<int32_t> available_stream_configs;
9441 cam_dimension_t active_array_dim;
9442 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9443 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009444
9445 /*advertise the list of supported input dimensions based on the property below.
9446 By default all sizes up to 5MP will be advertised.
9447 Note that the setprop resolution format should be WxH.
9448 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9449 To list all supported sizes, the setprop needs to be set to "0x0" */
9450 cam_dimension_t minInputSize = {2592,1944}; //5MP
9451 memset(prop, 0, sizeof(prop));
9452 property_get("persist.camera.input.minsize", prop, "2592x1944");
9453 if (strlen(prop) > 0) {
9454 char *saveptr = NULL;
9455 char *token = strtok_r(prop, "x", &saveptr);
9456 if (token != NULL) {
9457 minInputSize.width = atoi(token);
9458 }
9459 token = strtok_r(NULL, "x", &saveptr);
9460 if (token != NULL) {
9461 minInputSize.height = atoi(token);
9462 }
9463 }
9464
9465 /* Add input/output stream configurations for each scalar format */
9466 for (size_t j = 0; j < scalar_formats_count; j++) {
9467 switch (scalar_formats[j]) {
9468 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9469 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9470 case HAL_PIXEL_FORMAT_RAW10:
9471 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9472 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9473 addStreamConfig(available_stream_configs, scalar_formats[j],
9474 gCamCapability[cameraId]->raw_dim[i],
9475 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9476 }
9477 break;
9478 case HAL_PIXEL_FORMAT_BLOB:
9479 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9480 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9481 addStreamConfig(available_stream_configs, scalar_formats[j],
9482 gCamCapability[cameraId]->picture_sizes_tbl[i],
9483 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9484 }
9485 break;
9486 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9487 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9488 default:
9489 cam_dimension_t largest_picture_size;
9490 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9491 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9492 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9493 addStreamConfig(available_stream_configs, scalar_formats[j],
9494 gCamCapability[cameraId]->picture_sizes_tbl[i],
9495 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9496 /* For the 2 formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009497 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9498 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009499 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9500 >= minInputSize.width) || (gCamCapability[cameraId]->
9501 picture_sizes_tbl[i].height >= minInputSize.height)) {
9502 addStreamConfig(available_stream_configs, scalar_formats[j],
9503 gCamCapability[cameraId]->picture_sizes_tbl[i],
9504 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9505 }
9506 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009507 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009508
Thierry Strudel3d639192016-09-09 11:52:26 -07009509 break;
9510 }
9511 }
9512
9513 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9514 available_stream_configs.array(), available_stream_configs.size());
9515 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9516 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9517
9518 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9519 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9520
9521 /* android.scaler.availableMinFrameDurations */
9522 Vector<int64_t> available_min_durations;
9523 for (size_t j = 0; j < scalar_formats_count; j++) {
9524 switch (scalar_formats[j]) {
9525 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9526 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9527 case HAL_PIXEL_FORMAT_RAW10:
9528 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9529 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9530 available_min_durations.add(scalar_formats[j]);
9531 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9532 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9533 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9534 }
9535 break;
9536 default:
9537 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9538 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9539 available_min_durations.add(scalar_formats[j]);
9540 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9541 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9542 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9543 }
9544 break;
9545 }
9546 }
9547 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9548 available_min_durations.array(), available_min_durations.size());
9549
9550 Vector<int32_t> available_hfr_configs;
9551 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9552 int32_t fps = 0;
9553 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9554 case CAM_HFR_MODE_60FPS:
9555 fps = 60;
9556 break;
9557 case CAM_HFR_MODE_90FPS:
9558 fps = 90;
9559 break;
9560 case CAM_HFR_MODE_120FPS:
9561 fps = 120;
9562 break;
9563 case CAM_HFR_MODE_150FPS:
9564 fps = 150;
9565 break;
9566 case CAM_HFR_MODE_180FPS:
9567 fps = 180;
9568 break;
9569 case CAM_HFR_MODE_210FPS:
9570 fps = 210;
9571 break;
9572 case CAM_HFR_MODE_240FPS:
9573 fps = 240;
9574 break;
9575 case CAM_HFR_MODE_480FPS:
9576 fps = 480;
9577 break;
9578 case CAM_HFR_MODE_OFF:
9579 case CAM_HFR_MODE_MAX:
9580 default:
9581 break;
9582 }
9583
9584 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9585 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9586 /* For each HFR frame rate, need to advertise one variable fps range
9587 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9588 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9589 * set by the app. When video recording is started, [120, 120] is
9590 * set. This way sensor configuration does not change when recording
9591 * is started */
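    /* For instance, assuming PREVIEW_FPS_FOR_HFR is 30, a 1920x1080 entry in the
     * 120 fps table (illustrative size) would produce (1920, 1080, 30, 120, 4) and
     * (1920, 1080, 120, 120, 4). */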
9592
9593 /* (width, height, fps_min, fps_max, batch_size_max) */
9594 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9595 j < MAX_SIZES_CNT; j++) {
9596 available_hfr_configs.add(
9597 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9598 available_hfr_configs.add(
9599 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9600 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9601 available_hfr_configs.add(fps);
9602 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9603
9604 /* (width, height, fps_min, fps_max, batch_size_max) */
9605 available_hfr_configs.add(
9606 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9607 available_hfr_configs.add(
9608 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9609 available_hfr_configs.add(fps);
9610 available_hfr_configs.add(fps);
9611 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9612 }
9613 }
9614 }
9615 //Advertise HFR capability only if the property is set
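    //e.g. "adb shell setprop persist.camera.hal3hfr.enable 0" drops the HIGH_SPEED_VIDEO
    //configurations and capability (illustrative usage; default is "1")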
9616 memset(prop, 0, sizeof(prop));
9617 property_get("persist.camera.hal3hfr.enable", prop, "1");
9618 uint8_t hfrEnable = (uint8_t)atoi(prop);
9619
9620 if(hfrEnable && available_hfr_configs.array()) {
9621 staticInfo.update(
9622 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9623 available_hfr_configs.array(), available_hfr_configs.size());
9624 }
9625
9626 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9627 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9628 &max_jpeg_size, 1);
9629
9630 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9631 size_t size = 0;
9632 count = CAM_EFFECT_MODE_MAX;
9633 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9634 for (size_t i = 0; i < count; i++) {
9635 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9636 gCamCapability[cameraId]->supported_effects[i]);
9637 if (NAME_NOT_FOUND != val) {
9638 avail_effects[size] = (uint8_t)val;
9639 size++;
9640 }
9641 }
9642 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9643 avail_effects,
9644 size);
9645
9646 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9647 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9648 size_t supported_scene_modes_cnt = 0;
9649 count = CAM_SCENE_MODE_MAX;
9650 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9651 for (size_t i = 0; i < count; i++) {
9652 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9653 CAM_SCENE_MODE_OFF) {
9654 int val = lookupFwkName(SCENE_MODES_MAP,
9655 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9656 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009657
Thierry Strudel3d639192016-09-09 11:52:26 -07009658 if (NAME_NOT_FOUND != val) {
9659 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9660 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9661 supported_scene_modes_cnt++;
9662 }
9663 }
9664 }
9665 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9666 avail_scene_modes,
9667 supported_scene_modes_cnt);
9668
9669 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9670 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9671 supported_scene_modes_cnt,
9672 CAM_SCENE_MODE_MAX,
9673 scene_mode_overrides,
9674 supported_indexes,
9675 cameraId);
9676
9677 if (supported_scene_modes_cnt == 0) {
9678 supported_scene_modes_cnt = 1;
9679 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9680 }
9681
9682 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9683 scene_mode_overrides, supported_scene_modes_cnt * 3);
9684
9685 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9686 ANDROID_CONTROL_MODE_AUTO,
9687 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9688 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9689 available_control_modes,
9690 3);
9691
9692 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9693 size = 0;
9694 count = CAM_ANTIBANDING_MODE_MAX;
9695 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9696 for (size_t i = 0; i < count; i++) {
9697 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9698 gCamCapability[cameraId]->supported_antibandings[i]);
9699 if (NAME_NOT_FOUND != val) {
9700 avail_antibanding_modes[size] = (uint8_t)val;
9701 size++;
9702 }
9703
9704 }
9705 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9706 avail_antibanding_modes,
9707 size);
9708
9709 uint8_t avail_abberation_modes[] = {
9710 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9711 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9712 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9713 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9714 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9715 if (0 == count) {
9716 // If no aberration correction modes are available for a device, advertise only the OFF mode
9717 size = 1;
9718 } else {
9719 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9720 // So, advertise all 3 modes if at least one mode is supported, as per the
9721 // new M requirement
9722 size = 3;
9723 }
9724 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9725 avail_abberation_modes,
9726 size);
9727
9728 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9729 size = 0;
9730 count = CAM_FOCUS_MODE_MAX;
9731 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9732 for (size_t i = 0; i < count; i++) {
9733 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9734 gCamCapability[cameraId]->supported_focus_modes[i]);
9735 if (NAME_NOT_FOUND != val) {
9736 avail_af_modes[size] = (uint8_t)val;
9737 size++;
9738 }
9739 }
9740 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9741 avail_af_modes,
9742 size);
9743
9744 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9745 size = 0;
9746 count = CAM_WB_MODE_MAX;
9747 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9748 for (size_t i = 0; i < count; i++) {
9749 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9750 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9751 gCamCapability[cameraId]->supported_white_balances[i]);
9752 if (NAME_NOT_FOUND != val) {
9753 avail_awb_modes[size] = (uint8_t)val;
9754 size++;
9755 }
9756 }
9757 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9758 avail_awb_modes,
9759 size);
9760
9761 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9762 count = CAM_FLASH_FIRING_LEVEL_MAX;
9763 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9764 count);
9765 for (size_t i = 0; i < count; i++) {
9766 available_flash_levels[i] =
9767 gCamCapability[cameraId]->supported_firing_levels[i];
9768 }
9769 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9770 available_flash_levels, count);
9771
9772 uint8_t flashAvailable;
9773 if (gCamCapability[cameraId]->flash_available)
9774 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9775 else
9776 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9777 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9778 &flashAvailable, 1);
9779
9780 Vector<uint8_t> avail_ae_modes;
9781 count = CAM_AE_MODE_MAX;
9782 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9783 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009784 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9785 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9786 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9787 }
9788 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009789 }
9790 if (flashAvailable) {
9791 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9792 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9793 }
9794 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9795 avail_ae_modes.array(),
9796 avail_ae_modes.size());
9797
9798 int32_t sensitivity_range[2];
9799 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9800 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9801 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9802 sensitivity_range,
9803 sizeof(sensitivity_range) / sizeof(int32_t));
9804
9805 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9806 &gCamCapability[cameraId]->max_analog_sensitivity,
9807 1);
9808
9809 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9810 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9811 &sensor_orientation,
9812 1);
9813
9814 int32_t max_output_streams[] = {
9815 MAX_STALLING_STREAMS,
9816 MAX_PROCESSED_STREAMS,
9817 MAX_RAW_STREAMS};
9818 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9819 max_output_streams,
9820 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9821
9822 uint8_t avail_leds = 0;
9823 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9824 &avail_leds, 0);
9825
9826 uint8_t focus_dist_calibrated;
9827 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9828 gCamCapability[cameraId]->focus_dist_calibrated);
9829 if (NAME_NOT_FOUND != val) {
9830 focus_dist_calibrated = (uint8_t)val;
9831 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9832 &focus_dist_calibrated, 1);
9833 }
9834
9835 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9836 size = 0;
9837 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9838 MAX_TEST_PATTERN_CNT);
9839 for (size_t i = 0; i < count; i++) {
9840 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9841 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9842 if (NAME_NOT_FOUND != testpatternMode) {
9843 avail_testpattern_modes[size] = testpatternMode;
9844 size++;
9845 }
9846 }
9847 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9848 avail_testpattern_modes,
9849 size);
9850
9851 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9852 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9853 &max_pipeline_depth,
9854 1);
9855
9856 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9857 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9858 &partial_result_count,
9859 1);
9860
9861 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9862 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9863
9864 Vector<uint8_t> available_capabilities;
9865 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9866 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9867 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9868 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9869 if (supportBurst) {
9870 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9871 }
9872 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9873 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9874 if (hfrEnable && available_hfr_configs.array()) {
9875 available_capabilities.add(
9876 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9877 }
9878
9879 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9880 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9881 }
9882 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9883 available_capabilities.array(),
9884 available_capabilities.size());
9885
9886 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9887 //The assumption is that all Bayer cameras support MANUAL_SENSOR.
9888 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9889 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9890
9891 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9892 &aeLockAvailable, 1);
9893
9894 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
9895 //BURST_CAPTURE. The assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9896 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9897 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9898
9899 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9900 &awbLockAvailable, 1);
9901
9902 int32_t max_input_streams = 1;
9903 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9904 &max_input_streams,
9905 1);
9906
9907 /* format of the map is: input format, num_output_formats, outputFormat1, ..., outputFormatN */
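    /* i.e. the map below reads: IMPLEMENTATION_DEFINED -> {BLOB, YCbCr_420_888} and
       YCbCr_420_888 -> {BLOB, YCbCr_420_888} */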
9908 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9909 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9910 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9911 HAL_PIXEL_FORMAT_YCbCr_420_888};
9912 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9913 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9914
9915 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9916 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9917 &max_latency,
9918 1);
9919
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009920#ifndef USE_HAL_3_3
9921 int32_t isp_sensitivity_range[2];
9922 isp_sensitivity_range[0] =
9923 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9924 isp_sensitivity_range[1] =
9925 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9926 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9927 isp_sensitivity_range,
9928 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9929#endif
9930
Thierry Strudel3d639192016-09-09 11:52:26 -07009931 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9932 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9933 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9934 available_hot_pixel_modes,
9935 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9936
9937 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9938 ANDROID_SHADING_MODE_FAST,
9939 ANDROID_SHADING_MODE_HIGH_QUALITY};
9940 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9941 available_shading_modes,
9942 3);
9943
9944 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9945 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9946 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9947 available_lens_shading_map_modes,
9948 2);
9949
9950 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9951 ANDROID_EDGE_MODE_FAST,
9952 ANDROID_EDGE_MODE_HIGH_QUALITY,
9953 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9954 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9955 available_edge_modes,
9956 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9957
9958 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9959 ANDROID_NOISE_REDUCTION_MODE_FAST,
9960 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9961 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9962 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9963 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9964 available_noise_red_modes,
9965 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9966
9967 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9968 ANDROID_TONEMAP_MODE_FAST,
9969 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9970 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9971 available_tonemap_modes,
9972 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9973
9974 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9975 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9976 available_hot_pixel_map_modes,
9977 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9978
9979 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9980 gCamCapability[cameraId]->reference_illuminant1);
9981 if (NAME_NOT_FOUND != val) {
9982 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9983 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9984 }
9985
9986 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9987 gCamCapability[cameraId]->reference_illuminant2);
9988 if (NAME_NOT_FOUND != val) {
9989 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9990 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9991 }
9992
9993 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9994 (void *)gCamCapability[cameraId]->forward_matrix1,
9995 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9996
9997 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9998 (void *)gCamCapability[cameraId]->forward_matrix2,
9999 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10000
10001 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10002 (void *)gCamCapability[cameraId]->color_transform1,
10003 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10004
10005 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10006 (void *)gCamCapability[cameraId]->color_transform2,
10007 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10008
10009 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10010 (void *)gCamCapability[cameraId]->calibration_transform1,
10011 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10012
10013 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10014 (void *)gCamCapability[cameraId]->calibration_transform2,
10015 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10016
10017 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10018 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10019 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10020 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10021 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10022 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10023 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10024 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10025 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10026 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10027 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10028 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10029 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10030 ANDROID_JPEG_GPS_COORDINATES,
10031 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10032 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10033 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10034 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10035 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10036 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10037 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10038 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10039 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10040 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010041#ifndef USE_HAL_3_3
10042 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10043#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010044 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010045 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010046 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10047 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010048 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010049 /* DevCamDebug metadata request_keys_basic */
10050 DEVCAMDEBUG_META_ENABLE,
10051 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010052 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010053 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010054 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010055 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010056 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010057
10058 size_t request_keys_cnt =
10059 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10060 Vector<int32_t> available_request_keys;
10061 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10062 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10063 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10064 }
10065
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010066 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010067 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010068 }
10069
Thierry Strudel3d639192016-09-09 11:52:26 -070010070 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10071 available_request_keys.array(), available_request_keys.size());
10072
10073 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10074 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10075 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10076 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10077 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10078 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10079 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10080 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10081 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10082 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10083 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10084 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10085 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10086 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10087 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10088 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10089 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010090 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010091 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10092 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10093 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010094 ANDROID_STATISTICS_FACE_SCORES,
10095#ifndef USE_HAL_3_3
10096 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10097#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010098 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010099 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010100 // DevCamDebug metadata result_keys_basic
10101 DEVCAMDEBUG_META_ENABLE,
10102 // DevCamDebug metadata result_keys AF
10103 DEVCAMDEBUG_AF_LENS_POSITION,
10104 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10105 DEVCAMDEBUG_AF_TOF_DISTANCE,
10106 DEVCAMDEBUG_AF_LUMA,
10107 DEVCAMDEBUG_AF_HAF_STATE,
10108 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10109 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10110 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10111 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10112 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10113 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10114 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10115 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10116 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10117 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10118 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10119 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10120 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10121 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10122 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10123 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10124 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10125 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10126 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10127 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10128 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10129 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10130 // DevCamDebug metadata result_keys AEC
10131 DEVCAMDEBUG_AEC_TARGET_LUMA,
10132 DEVCAMDEBUG_AEC_COMP_LUMA,
10133 DEVCAMDEBUG_AEC_AVG_LUMA,
10134 DEVCAMDEBUG_AEC_CUR_LUMA,
10135 DEVCAMDEBUG_AEC_LINECOUNT,
10136 DEVCAMDEBUG_AEC_REAL_GAIN,
10137 DEVCAMDEBUG_AEC_EXP_INDEX,
10138 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010139 // DevCamDebug metadata result_keys zzHDR
10140 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10141 DEVCAMDEBUG_AEC_L_LINECOUNT,
10142 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10143 DEVCAMDEBUG_AEC_S_LINECOUNT,
10144 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10145 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10146 // DevCamDebug metadata result_keys ADRC
10147 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10148 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10149 DEVCAMDEBUG_AEC_GTM_RATIO,
10150 DEVCAMDEBUG_AEC_LTM_RATIO,
10151 DEVCAMDEBUG_AEC_LA_RATIO,
10152 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010153 // DevCamDebug metadata result_keys AWB
10154 DEVCAMDEBUG_AWB_R_GAIN,
10155 DEVCAMDEBUG_AWB_G_GAIN,
10156 DEVCAMDEBUG_AWB_B_GAIN,
10157 DEVCAMDEBUG_AWB_CCT,
10158 DEVCAMDEBUG_AWB_DECISION,
10159 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010160 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10161 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10162 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010163 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010164 };
10165
Thierry Strudel3d639192016-09-09 11:52:26 -070010166 size_t result_keys_cnt =
10167 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10168
10169 Vector<int32_t> available_result_keys;
10170 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10171 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10172 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10173 }
10174 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10175 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10176 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10177 }
10178 if (supportedFaceDetectMode == 1) {
10179 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10180 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10181 } else if ((supportedFaceDetectMode == 2) ||
10182 (supportedFaceDetectMode == 3)) {
10183 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10184 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10185 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010186#ifndef USE_HAL_3_3
10187 if (hasBlackRegions) {
10188 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10189 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10190 }
10191#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010192
10193 if (gExposeEnableZslKey) {
10194 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10195 }
10196
Thierry Strudel3d639192016-09-09 11:52:26 -070010197 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10198 available_result_keys.array(), available_result_keys.size());
10199
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010200 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010201 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10202 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10203 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10204 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10205 ANDROID_SCALER_CROPPING_TYPE,
10206 ANDROID_SYNC_MAX_LATENCY,
10207 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10208 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10209 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10210 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10211 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10212 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10213 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10214 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10215 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10216 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10217 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10218 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10219 ANDROID_LENS_FACING,
10220 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10221 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10222 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10223 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10224 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10225 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10226 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10227 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10228 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10229 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10230 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10231 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10232 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10233 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10234 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10235 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10236 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10237 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10238 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10239 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010240 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010241 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10242 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10243 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10244 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10245 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10246 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10247 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10248 ANDROID_CONTROL_AVAILABLE_MODES,
10249 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10250 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10251 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10252 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010253 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10254#ifndef USE_HAL_3_3
10255 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10256 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10257#endif
10258 };
10259
10260 Vector<int32_t> available_characteristics_keys;
10261 available_characteristics_keys.appendArray(characteristics_keys_basic,
10262 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10263#ifndef USE_HAL_3_3
10264 if (hasBlackRegions) {
10265 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10266 }
10267#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010268
10269 if (0 <= indexPD) {
10270 int32_t depthKeys[] = {
10271 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10272 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10273 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10274 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10275 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10276 };
10277 available_characteristics_keys.appendArray(depthKeys,
10278 sizeof(depthKeys) / sizeof(depthKeys[0]));
10279 }
10280
Thierry Strudel3d639192016-09-09 11:52:26 -070010281 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010282 available_characteristics_keys.array(),
10283 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010284
10285 /* available stall durations depend on the HW + SW and will differ between devices */
10286 /* entries for raw have to be added after implementation */
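    /* Each stall entry below is a (format, width, height, stall duration in ns) tuple,
       taken from the per-size JPEG and RAW16 stall tables in the capability data. */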
10287 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10288 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10289
10290 Vector<int64_t> available_stall_durations;
10291 for (uint32_t j = 0; j < stall_formats_count; j++) {
10292 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10293 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10294 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10295 available_stall_durations.add(stall_formats[j]);
10296 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10297 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10298 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10299 }
10300 } else {
10301 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10302 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10303 available_stall_durations.add(stall_formats[j]);
10304 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10305 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10306 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10307 }
10308 }
10309 }
10310 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10311 available_stall_durations.array(),
10312 available_stall_durations.size());
10313
10314 //QCAMERA3_OPAQUE_RAW
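    //Select the opaque RAW format family (legacy QCOM vs MIPI packed) and derive the
    //bits per pixel from the sensor white level (8/10/12-bit maximum values).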
10315 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10316 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10317 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10318 case LEGACY_RAW:
10319 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10320 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10321 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10322 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10323 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10324 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10325 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10326 break;
10327 case MIPI_RAW:
10328 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10329 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10330 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10331 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10332 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10333 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10334 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10335 break;
10336 default:
10337 LOGE("unknown opaque_raw_format %d",
10338 gCamCapability[cameraId]->opaque_raw_fmt);
10339 break;
10340 }
10341 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10342
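    // QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) triples; the stride
    // comes from the plane layout computed by mm_stream_calc_offset_raw().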
10343 Vector<int32_t> strides;
10344 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10345 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10346 cam_stream_buf_plane_info_t buf_planes;
10347 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10348 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10349 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10350 &gCamCapability[cameraId]->padding_info, &buf_planes);
10351 strides.add(buf_planes.plane_info.mp[0].stride);
10352 }
10353 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10354 strides.size());
10355
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010356 //TBD: remove the following line once backend advertises zzHDR in feature mask
10357 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010358 //Video HDR default
10359 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10360 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010361 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010362 int32_t vhdr_mode[] = {
10363 QCAMERA3_VIDEO_HDR_MODE_OFF,
10364 QCAMERA3_VIDEO_HDR_MODE_ON};
10365
10366 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10367 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10368 vhdr_mode, vhdr_mode_count);
10369 }
10370
Thierry Strudel3d639192016-09-09 11:52:26 -070010371 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10372 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10373 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10374
10375 uint8_t isMonoOnly =
10376 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10377 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10378 &isMonoOnly, 1);
10379
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010380#ifndef USE_HAL_3_3
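    // ANDROID_SENSOR_OPAQUE_RAW_SIZE entries are (width, height, frame length in bytes)
    // triples, one per advertised RAW_OPAQUE dimension.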
10381 Vector<int32_t> opaque_size;
10382 for (size_t j = 0; j < scalar_formats_count; j++) {
10383 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10384 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10385 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10386 cam_stream_buf_plane_info_t buf_planes;
10387
10388 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10389 &gCamCapability[cameraId]->padding_info, &buf_planes);
10390
10391 if (rc == 0) {
10392 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10393 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10394 opaque_size.add(buf_planes.plane_info.frame_len);
10395 } else {
10396 LOGE("raw frame calculation failed!");
10397 }
10398 }
10399 }
10400 }
10401
10402 if ((opaque_size.size() > 0) &&
10403 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10404 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10405 else
10406 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10407#endif
10408
Thierry Strudel04e026f2016-10-10 11:27:36 -070010409 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10410 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10411 size = 0;
10412 count = CAM_IR_MODE_MAX;
10413 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10414 for (size_t i = 0; i < count; i++) {
10415 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10416 gCamCapability[cameraId]->supported_ir_modes[i]);
10417 if (NAME_NOT_FOUND != val) {
10418 avail_ir_modes[size] = (int32_t)val;
10419 size++;
10420 }
10421 }
10422 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10423 avail_ir_modes, size);
10424 }
10425
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010426 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10427 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10428 size = 0;
10429 count = CAM_AEC_CONVERGENCE_MAX;
10430 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10431 for (size_t i = 0; i < count; i++) {
10432 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10433 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10434 if (NAME_NOT_FOUND != val) {
10435 available_instant_aec_modes[size] = (int32_t)val;
10436 size++;
10437 }
10438 }
10439 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10440 available_instant_aec_modes, size);
10441 }
10442
Thierry Strudel54dc9782017-02-15 12:12:10 -080010443 int32_t sharpness_range[] = {
10444 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10445 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10446 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10447
10448 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10449 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10450 size = 0;
10451 count = CAM_BINNING_CORRECTION_MODE_MAX;
10452 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10453 for (size_t i = 0; i < count; i++) {
10454 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10455 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10456 gCamCapability[cameraId]->supported_binning_modes[i]);
10457 if (NAME_NOT_FOUND != val) {
10458 avail_binning_modes[size] = (int32_t)val;
10459 size++;
10460 }
10461 }
10462 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10463 avail_binning_modes, size);
10464 }
10465
10466 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10467 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10468 size = 0;
10469 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10470 for (size_t i = 0; i < count; i++) {
10471 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10472 gCamCapability[cameraId]->supported_aec_modes[i]);
10473 if (NAME_NOT_FOUND != val)
10474 available_aec_modes[size++] = val;
10475 }
10476 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10477 available_aec_modes, size);
10478 }
10479
10480 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10481 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10482 size = 0;
10483 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10484 for (size_t i = 0; i < count; i++) {
10485 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10486 gCamCapability[cameraId]->supported_iso_modes[i]);
10487 if (NAME_NOT_FOUND != val)
10488 available_iso_modes[size++] = val;
10489 }
10490 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10491 available_iso_modes, size);
10492 }
10493
10494 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010495 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010496 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10497 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10498 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10499
10500 int32_t available_saturation_range[4];
10501 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10502 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10503 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10504 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10505 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10506 available_saturation_range, 4);
10507
10508 uint8_t is_hdr_values[2];
10509 is_hdr_values[0] = 0;
10510 is_hdr_values[1] = 1;
10511 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10512 is_hdr_values, 2);
10513
10514 float is_hdr_confidence_range[2];
10515 is_hdr_confidence_range[0] = 0.0;
10516 is_hdr_confidence_range[1] = 1.0;
10517 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10518 is_hdr_confidence_range, 2);
10519
Emilian Peev0a972ef2017-03-16 10:25:53 +000010520 size_t eepromLength = strnlen(
10521 reinterpret_cast<const char *>(
10522 gCamCapability[cameraId]->eeprom_version_info),
10523 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10524 if (0 < eepromLength) {
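        // Append an Easel presence marker (",E:Y" if Easel is present, ",E:N"
        // otherwise) to the EEPROM version string when there is room for it.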
Zhijun Hea557c4c2017-03-16 18:37:53 -070010525 char easelInfo[] = ",E:N";
10526 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10527 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10528 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010529 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10530 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010531 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010532 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10533 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10534 }
10535
Thierry Strudel3d639192016-09-09 11:52:26 -070010536 gStaticMetadata[cameraId] = staticInfo.release();
10537 return rc;
10538}
10539
10540/*===========================================================================
10541 * FUNCTION : makeTable
10542 *
10543 * DESCRIPTION: flatten an array of dimensions into an interleaved width/height size table
10544 *
10545 * PARAMETERS :
10546 *   @dimTable, @size      : input array of cam_dimension_t entries and its valid count
10547 *   @max_size, @sizeTable : output capacity (in entries) and the flattened size table
10548 *==========================================================================*/
10549void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10550 size_t max_size, int32_t *sizeTable)
10551{
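    // Illustrative example: a dimTable of {{4032, 3024}, {1920, 1080}} is
    // flattened into sizeTable as {4032, 3024, 1920, 1080}.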
10552 size_t j = 0;
10553 if (size > max_size) {
10554 size = max_size;
10555 }
10556 for (size_t i = 0; i < size; i++) {
10557 sizeTable[j] = dimTable[i].width;
10558 sizeTable[j+1] = dimTable[i].height;
10559 j+=2;
10560 }
10561}
10562
10563/*===========================================================================
10564 * FUNCTION : makeFPSTable
10565 *
10566 * DESCRIPTION: flatten an array of fps ranges into an interleaved min/max fps table
10567 *
10568 * PARAMETERS :
10569 *   @fpsTable, @size, @max_size, @fpsRangesTable : input ranges, valid count, output capacity and output table
10570 *==========================================================================*/
10571void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10572 size_t max_size, int32_t *fpsRangesTable)
10573{
10574 size_t j = 0;
10575 if (size > max_size) {
10576 size = max_size;
10577 }
10578 for (size_t i = 0; i < size; i++) {
10579 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10580 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10581 j+=2;
10582 }
10583}
10584
10585/*===========================================================================
10586 * FUNCTION : makeOverridesList
10587 *
10588 * DESCRIPTION: make the list of scene mode overrides (AE/AWB/AF per scene mode)
10589 *              for the scene modes supported by the framework
10590 * PARAMETERS :
10591 *   @overridesTable, @size, @max_size : backend override table, its entry count and capacity limit
10592 *   @overridesList, @supported_indexes, @camera_id : output list, framework scene mode indexes, camera id
10593 *==========================================================================*/
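// Each output entry is an (AE mode, AWB mode, AF mode) triple for one
// framework-supported scene mode, which is why j advances by 3 per iteration.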
10594void QCamera3HardwareInterface::makeOverridesList(
10595 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10596 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10597{
10598 /* The backend daemon provides overrides for all scene modes.
10599 However, the framework should only be sent the overrides for the
10600 scene modes it supports. */
10601 size_t j = 0;
10602 if (size > max_size) {
10603 size = max_size;
10604 }
10605 size_t focus_count = CAM_FOCUS_MODE_MAX;
10606 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10607 focus_count);
10608 for (size_t i = 0; i < size; i++) {
10609 bool supt = false;
10610 size_t index = supported_indexes[i];
10611 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10612 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10613 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10614 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10615 overridesTable[index].awb_mode);
10616 if (NAME_NOT_FOUND != val) {
10617 overridesList[j+1] = (uint8_t)val;
10618 }
10619 uint8_t focus_override = overridesTable[index].af_mode;
10620 for (size_t k = 0; k < focus_count; k++) {
10621 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10622 supt = true;
10623 break;
10624 }
10625 }
10626 if (supt) {
10627 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10628 focus_override);
10629 if (NAME_NOT_FOUND != val) {
10630 overridesList[j+2] = (uint8_t)val;
10631 }
10632 } else {
10633 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10634 }
10635 j+=3;
10636 }
10637}
10638
10639/*===========================================================================
10640 * FUNCTION : filterJpegSizes
10641 *
10642 * DESCRIPTION: filter the processed stream sizes down to the JPEG sizes that are no
10643 *              smaller than the active array size divided by the given downscale factor
10644 *
10645 * PARAMETERS : @jpegSizes (out), @processedSizes/@processedSizesCnt (in), @maxCount,
10646 *              @active_array_size, @downscale_factor
10647 * RETURN : length of jpegSizes array
10648 *==========================================================================*/
10649
10650size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10651 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10652 uint8_t downscale_factor)
10653{
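    // Illustrative example: with a 4000x3000 active array and a downscale_factor
    // of 4, only processed sizes of at least 1000x750 are kept as JPEG sizes.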
10654 if (0 == downscale_factor) {
10655 downscale_factor = 1;
10656 }
10657
10658 int32_t min_width = active_array_size.width / downscale_factor;
10659 int32_t min_height = active_array_size.height / downscale_factor;
10660 size_t jpegSizesCnt = 0;
10661 if (processedSizesCnt > maxCount) {
10662 processedSizesCnt = maxCount;
10663 }
10664 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10665 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10666 jpegSizes[jpegSizesCnt] = processedSizes[i];
10667 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10668 jpegSizesCnt += 2;
10669 }
10670 }
10671 return jpegSizesCnt;
10672}
10673
10674/*===========================================================================
10675 * FUNCTION : computeNoiseModelEntryS
10676 *
10677 * DESCRIPTION: function to map a given sensitivity to the S noise
10678 * model parameters in the DNG noise model.
10679 *
10680 * PARAMETERS : sens : the sensor sensitivity
10681 *
10682 * RETURN : S (sensor amplification) noise
10683 *
10684 *==========================================================================*/
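// In the DNG noise model the noise at a normalized signal level x is modeled as
// N(x) = sqrt(S * x + O); this helper derives the sensitivity-dependent S term
// from the per-sensor gradient_S/offset_S calibration in gCamCapability.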
10685double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10686 double s = gCamCapability[mCameraId]->gradient_S * sens +
10687 gCamCapability[mCameraId]->offset_S;
10688 return ((s < 0.0) ? 0.0 : s);
10689}
10690
10691/*===========================================================================
10692 * FUNCTION : computeNoiseModelEntryO
10693 *
10694 * DESCRIPTION: function to map a given sensitivity to the O noise
10695 * model parameters in the DNG noise model.
10696 *
10697 * PARAMETERS : sens : the sensor sensitivity
10698 *
10699 * RETURN : O (sensor readout) noise
10700 *
10701 *==========================================================================*/
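// The O (readout) term scales with the square of the digital gain, where digital
// gain is the portion of the requested sensitivity above the sensor's maximum
// analog sensitivity, clamped to a minimum of 1.0.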
10702double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10703 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10704 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10705 1.0 : (1.0 * sens / max_analog_sens);
10706 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10707 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10708 return ((o < 0.0) ? 0.0 : o);
10709}
10710
10711/*===========================================================================
10712 * FUNCTION : getSensorSensitivity
10713 *
10714 * DESCRIPTION: convert iso_mode to an integer value
10715 *
10716 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10717 *
10718 * RETURN : sensitivity supported by sensor
10719 *
10720 *==========================================================================*/
10721int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10722{
10723 int32_t sensitivity;
10724
10725 switch (iso_mode) {
10726 case CAM_ISO_MODE_100:
10727 sensitivity = 100;
10728 break;
10729 case CAM_ISO_MODE_200:
10730 sensitivity = 200;
10731 break;
10732 case CAM_ISO_MODE_400:
10733 sensitivity = 400;
10734 break;
10735 case CAM_ISO_MODE_800:
10736 sensitivity = 800;
10737 break;
10738 case CAM_ISO_MODE_1600:
10739 sensitivity = 1600;
10740 break;
10741 default:
10742 sensitivity = -1;
10743 break;
10744 }
10745 return sensitivity;
10746}
10747
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010748int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010749 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010750 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10751 // to connect to Easel.
10752 bool doNotPowerOnEasel =
10753 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10754
10755 if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010756 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10757 return OK;
10758 }
10759
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010760 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010761 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010762 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010763 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010764 return res;
10765 }
10766
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010767 EaselManagerClientOpened = true;
10768
10769 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010770 if (res != OK) {
10771 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10772 }
10773
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010774 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010775 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010776
10777 // Expose enableZsl key only when HDR+ mode is enabled.
10778 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010779 }
10780
10781 return OK;
10782}
10783
Thierry Strudel3d639192016-09-09 11:52:26 -070010784/*===========================================================================
10785 * FUNCTION : getCamInfo
10786 *
10787 * DESCRIPTION: query camera capabilities
10788 *
10789 * PARAMETERS :
10790 * @cameraId : camera Id
10791 * @info : camera info struct to be filled in with camera capabilities
10792 *
10793 * RETURN : int type of status
10794 * NO_ERROR -- success
10795 * none-zero failure code
10796 *==========================================================================*/
10797int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10798 struct camera_info *info)
10799{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010800 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010801 int rc = 0;
10802
10803 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010804
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010805 {
10806 Mutex::Autolock l(gHdrPlusClientLock);
10807 rc = initHdrPlusClientLocked();
10808 if (rc != OK) {
10809 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10810 pthread_mutex_unlock(&gCamLock);
10811 return rc;
10812 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010813 }
10814
Thierry Strudel3d639192016-09-09 11:52:26 -070010815 if (NULL == gCamCapability[cameraId]) {
10816 rc = initCapabilities(cameraId);
10817 if (rc < 0) {
10818 pthread_mutex_unlock(&gCamLock);
10819 return rc;
10820 }
10821 }
10822
10823 if (NULL == gStaticMetadata[cameraId]) {
10824 rc = initStaticMetadata(cameraId);
10825 if (rc < 0) {
10826 pthread_mutex_unlock(&gCamLock);
10827 return rc;
10828 }
10829 }
10830
10831 switch(gCamCapability[cameraId]->position) {
10832 case CAM_POSITION_BACK:
10833 case CAM_POSITION_BACK_AUX:
10834 info->facing = CAMERA_FACING_BACK;
10835 break;
10836
10837 case CAM_POSITION_FRONT:
10838 case CAM_POSITION_FRONT_AUX:
10839 info->facing = CAMERA_FACING_FRONT;
10840 break;
10841
10842 default:
10843 LOGE("Unknown position type %d for camera id:%d",
10844 gCamCapability[cameraId]->position, cameraId);
10845 rc = -1;
10846 break;
10847 }
10848
10849
10850 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010851#ifndef USE_HAL_3_3
10852 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10853#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010854 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010855#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010856 info->static_camera_characteristics = gStaticMetadata[cameraId];
10857
10858 //For now assume both cameras can operate independently.
10859 info->conflicting_devices = NULL;
10860 info->conflicting_devices_length = 0;
10861
10862 //resource cost is 100 * MIN(1.0, m/M),
10863 //where m is throughput requirement with maximum stream configuration
10864 //and M is CPP maximum throughput.
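    //Illustrative example (hypothetical numbers): with 3 processed streams, a
    //4032x3024 active array, 30fps max and a CPP bandwidth of 1.2e9 pixels/s,
    //the ratio is 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, i.e. a resource cost of 91.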
10865 float max_fps = 0.0;
10866 for (uint32_t i = 0;
10867 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10868 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10869 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10870 }
10871 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10872 gCamCapability[cameraId]->active_array_size.width *
10873 gCamCapability[cameraId]->active_array_size.height * max_fps /
10874 gCamCapability[cameraId]->max_pixel_bandwidth;
10875 info->resource_cost = 100 * MIN(1.0, ratio);
10876 LOGI("camera %d resource cost is %d", cameraId,
10877 info->resource_cost);
10878
10879 pthread_mutex_unlock(&gCamLock);
10880 return rc;
10881}
10882
10883/*===========================================================================
10884 * FUNCTION : translateCapabilityToMetadata
10885 *
10886 * DESCRIPTION: translate the capability into camera_metadata_t
10887 *
10888 * PARAMETERS : type of the request
10889 *
10890 *
10891 * RETURN : success: camera_metadata_t*
10892 * failure: NULL
10893 *
10894 *==========================================================================*/
10895camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10896{
10897 if (mDefaultMetadata[type] != NULL) {
10898 return mDefaultMetadata[type];
10899 }
10900 //first time we are handling this request
10901 //fill up the metadata structure using the wrapper class
10902 CameraMetadata settings;
10903 //translate from cam_capability_t to camera_metadata_tag_t
10904 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10905 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10906 int32_t defaultRequestID = 0;
10907 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10908
10909 /* OIS disable */
10910 char ois_prop[PROPERTY_VALUE_MAX];
10911 memset(ois_prop, 0, sizeof(ois_prop));
10912 property_get("persist.camera.ois.disable", ois_prop, "0");
10913 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10914
10915 /* Force video to use OIS */
10916 char videoOisProp[PROPERTY_VALUE_MAX];
10917 memset(videoOisProp, 0, sizeof(videoOisProp));
10918 property_get("persist.camera.ois.video", videoOisProp, "1");
10919 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010920
10921 // Hybrid AE enable/disable
10922 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10923 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10924 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10925 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10926
Thierry Strudel3d639192016-09-09 11:52:26 -070010927 uint8_t controlIntent = 0;
10928 uint8_t focusMode;
10929 uint8_t vsMode;
10930 uint8_t optStabMode;
10931 uint8_t cacMode;
10932 uint8_t edge_mode;
10933 uint8_t noise_red_mode;
10934 uint8_t tonemap_mode;
10935 bool highQualityModeEntryAvailable = FALSE;
10936 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010937 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010938 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10939 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010940 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010941 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010942 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010943
Thierry Strudel3d639192016-09-09 11:52:26 -070010944 switch (type) {
10945 case CAMERA3_TEMPLATE_PREVIEW:
10946 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10947 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10948 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10949 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10950 edge_mode = ANDROID_EDGE_MODE_FAST;
10951 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10952 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10953 break;
10954 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10955 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10956 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10957 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10958 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10959 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10960 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10961 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10962 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10963 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10964 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10965 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10966 highQualityModeEntryAvailable = TRUE;
10967 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10968 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10969 fastModeEntryAvailable = TRUE;
10970 }
10971 }
10972 if (highQualityModeEntryAvailable) {
10973 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10974 } else if (fastModeEntryAvailable) {
10975 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10976 }
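            // For sensors of type CAM_SENSOR_RAW, default still capture to
            // publishing the lens shading map.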
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010977 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10978 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10979 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010980 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010981 break;
10982 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10983 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10984 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10985 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010986 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10987 edge_mode = ANDROID_EDGE_MODE_FAST;
10988 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10989 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10990 if (forceVideoOis)
10991 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10992 break;
10993 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10994 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10995 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10996 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010997 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10998 edge_mode = ANDROID_EDGE_MODE_FAST;
10999 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11000 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11001 if (forceVideoOis)
11002 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11003 break;
11004 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11005 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11006 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11007 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11008 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11009 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11010 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11011 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11012 break;
11013 case CAMERA3_TEMPLATE_MANUAL:
11014 edge_mode = ANDROID_EDGE_MODE_FAST;
11015 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11016 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11017 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11018 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11019 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11020 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11021 break;
11022 default:
11023 edge_mode = ANDROID_EDGE_MODE_FAST;
11024 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11025 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11026 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11027 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11028 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11029 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11030 break;
11031 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011032 // Set CAC to OFF if the underlying device doesn't support it
11033 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11034 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11035 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011036 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11037 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11038 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11039 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11040 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11041 }
11042 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011043 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011044 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011045
11046 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11047 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11048 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11049 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11050 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11051 || ois_disable)
11052 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11053 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011054 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011055
11056 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11057 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11058
11059 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11060 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11061
11062 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11063 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11064
11065 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11066 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11067
11068 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11069 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11070
11071 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11072 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11073
11074 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11075 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11076
11077 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11078 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11079
11080 /*flash*/
11081 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11082 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11083
11084 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11085 settings.update(ANDROID_FLASH_FIRING_POWER,
11086 &flashFiringLevel, 1);
11087
11088 /* lens */
11089 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11090 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11091
11092 if (gCamCapability[mCameraId]->filter_densities_count) {
11093 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11094 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11095 gCamCapability[mCameraId]->filter_densities_count);
11096 }
11097
11098 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11099 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11100
Thierry Strudel3d639192016-09-09 11:52:26 -070011101 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11102 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11103
11104 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11105 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11106
11107 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11108 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11109
11110 /* face detection (default to OFF) */
11111 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11112 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11113
Thierry Strudel54dc9782017-02-15 12:12:10 -080011114 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11115 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011116
11117 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11118 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11119
11120 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11121 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11122
Thierry Strudel3d639192016-09-09 11:52:26 -070011123
11124 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11125 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11126
11127 /* Exposure time (default to the minimum supported exposure time) */
11128 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11129 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11130
11131 /* frame duration */
11132 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11133 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11134
11135 /* sensitivity */
11136 static const int32_t default_sensitivity = 100;
11137 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011138#ifndef USE_HAL_3_3
11139 static const int32_t default_isp_sensitivity =
11140 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11141 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11142#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011143
11144 /*edge mode*/
11145 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11146
11147 /*noise reduction mode*/
11148 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11149
11150 /*color correction mode*/
11151 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11152 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11153
11154 /*transform matrix mode*/
11155 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11156
11157 int32_t scaler_crop_region[4];
11158 scaler_crop_region[0] = 0;
11159 scaler_crop_region[1] = 0;
11160 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11161 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11162 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11163
11164 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11165 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11166
11167 /*focus distance*/
11168 float focus_distance = 0.0;
11169 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11170
11171 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011172 /* Restrict template max_fps to 30 */
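    /* Illustrative example (hypothetical fps table): given ranges {15,30}, {30,30}
     * and {7.5,30}, the preview/still/ZSL templates pick {7.5,30} (the widest range),
     * while the remaining templates pick {30,30} (the highest fixed-fps range). */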
Thierry Strudel3d639192016-09-09 11:52:26 -070011173 float max_range = 0.0;
11174 float max_fixed_fps = 0.0;
11175 int32_t fps_range[2] = {0, 0};
11176 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11177 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011178 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11179 TEMPLATE_MAX_PREVIEW_FPS) {
11180 continue;
11181 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011182 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11183 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11184 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11185 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11186 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11187 if (range > max_range) {
11188 fps_range[0] =
11189 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11190 fps_range[1] =
11191 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11192 max_range = range;
11193 }
11194 } else {
11195 if (range < 0.01 && max_fixed_fps <
11196 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11197 fps_range[0] =
11198 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11199 fps_range[1] =
11200 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11201 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11202 }
11203 }
11204 }
11205 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11206
11207 /*precapture trigger*/
11208 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11209 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11210
11211 /*af trigger*/
11212 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11213 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11214
11215 /* ae & af regions */
11216 int32_t active_region[] = {
11217 gCamCapability[mCameraId]->active_array_size.left,
11218 gCamCapability[mCameraId]->active_array_size.top,
11219 gCamCapability[mCameraId]->active_array_size.left +
11220 gCamCapability[mCameraId]->active_array_size.width,
11221 gCamCapability[mCameraId]->active_array_size.top +
11222 gCamCapability[mCameraId]->active_array_size.height,
11223 0};
11224 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11225 sizeof(active_region) / sizeof(active_region[0]));
11226 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11227 sizeof(active_region) / sizeof(active_region[0]));
11228
11229 /* black level lock */
11230 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11231 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11232
Thierry Strudel3d639192016-09-09 11:52:26 -070011233 //special defaults for manual template
11234 if (type == CAMERA3_TEMPLATE_MANUAL) {
11235 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11236 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11237
11238 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11239 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11240
11241 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11242 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11243
11244 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11245 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11246
11247 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11248 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11249
11250 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11251 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11252 }
11253
11254
11255 /* TNR
11256 * This is where we decide for which capture templates TNR is enabled.
11257 * TNR is turned on if either the preview or the video stream requires it.
11258 * This is not to be confused with per-stream linking; that decision is
11259 * still made per session and is handled as part of stream configuration.
11260 */
11261 uint8_t tnr_enable = 0;
11262
11263 if (m_bTnrPreview || m_bTnrVideo) {
11264
11265 switch (type) {
11266 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11267 tnr_enable = 1;
11268 break;
11269
11270 default:
11271 tnr_enable = 0;
11272 break;
11273 }
11274
11275 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11276 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11277 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11278
11279 LOGD("TNR:%d with process plate %d for template:%d",
11280 tnr_enable, tnr_process_type, type);
11281 }
11282
11283 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011284 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011285 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11286
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011287 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011288 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11289
Shuzhen Wang920ea402017-05-03 08:49:39 -070011290 uint8_t related_camera_id = mCameraId;
11291 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011292
11293 /* CDS default */
11294 char prop[PROPERTY_VALUE_MAX];
11295 memset(prop, 0, sizeof(prop));
11296 property_get("persist.camera.CDS", prop, "Auto");
11297 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11298 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11299 if (CAM_CDS_MODE_MAX == cds_mode) {
11300 cds_mode = CAM_CDS_MODE_AUTO;
11301 }
11302
11303 /* Disabling CDS in templates which have TNR enabled*/
11304 if (tnr_enable)
11305 cds_mode = CAM_CDS_MODE_OFF;
11306
11307 int32_t mode = cds_mode;
11308 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011309
Thierry Strudel269c81a2016-10-12 12:13:59 -070011310 /* Manual Convergence AEC Speed is disabled by default*/
11311 float default_aec_speed = 0;
11312 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11313
11314 /* Manual Convergence AWB Speed is disabled by default*/
11315 float default_awb_speed = 0;
11316 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11317
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011318 // Set instant AEC to normal convergence by default
11319 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11320 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11321
Shuzhen Wang19463d72016-03-08 11:09:52 -080011322 /* hybrid ae */
11323 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11324
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011325 if (gExposeEnableZslKey) {
11326 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11327 }
11328
Thierry Strudel3d639192016-09-09 11:52:26 -070011329 mDefaultMetadata[type] = settings.release();
11330
11331 return mDefaultMetadata[type];
11332}
11333
11334/*===========================================================================
11335 * FUNCTION : setFrameParameters
11336 *
11337 * DESCRIPTION: set parameters per frame as requested in the metadata from
11338 * framework
11339 *
11340 * PARAMETERS :
11341 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011342 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011343 * @blob_request: Whether this request is a blob request or not
11344 *
11345 * RETURN : success: NO_ERROR
11346 * failure:
11347 *==========================================================================*/
11348int QCamera3HardwareInterface::setFrameParameters(
11349 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011350 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011351 int blob_request,
11352 uint32_t snapshotStreamId)
11353{
11354 /*translate from camera_metadata_t type to parm_type_t*/
11355 int rc = 0;
11356 int32_t hal_version = CAM_HAL_V3;
11357
11358 clear_metadata_buffer(mParameters);
11359 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11360 LOGE("Failed to set hal version in the parameters");
11361 return BAD_VALUE;
11362 }
11363
11364 /*we need to update the frame number in the parameters*/
11365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11366 request->frame_number)) {
11367 LOGE("Failed to set the frame number in the parameters");
11368 return BAD_VALUE;
11369 }
11370
11371 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011373 LOGE("Failed to set stream id in the parameters");
11374 return BAD_VALUE;
11375 }
11376
11377 if (mUpdateDebugLevel) {
11378 uint32_t dummyDebugLevel = 0;
11379 /* The value of dummyDebugLevel is irrelevant. On
11380 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read. */
11381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11382 dummyDebugLevel)) {
11383 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11384 return BAD_VALUE;
11385 }
11386 mUpdateDebugLevel = false;
11387 }
11388
11389 if(request->settings != NULL){
11390 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11391 if (blob_request)
11392 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11393 }
11394
11395 return rc;
11396}
11397
11398/*===========================================================================
11399 * FUNCTION : setReprocParameters
11400 *
11401 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11402 * return it.
11403 *
11404 * PARAMETERS :
11405 * @request : request that needs to be serviced
11406 *
11407 * RETURN : success: NO_ERROR
11408 * failure:
11409 *==========================================================================*/
11410int32_t QCamera3HardwareInterface::setReprocParameters(
11411 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11412 uint32_t snapshotStreamId)
11413{
11414 /*translate from camera_metadata_t type to parm_type_t*/
11415 int rc = 0;
11416
11417 if (NULL == request->settings){
11418 LOGE("Reprocess settings cannot be NULL");
11419 return BAD_VALUE;
11420 }
11421
11422 if (NULL == reprocParam) {
11423 LOGE("Invalid reprocessing metadata buffer");
11424 return BAD_VALUE;
11425 }
11426 clear_metadata_buffer(reprocParam);
11427
11428 /*we need to update the frame number in the parameters*/
11429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11430 request->frame_number)) {
11431 LOGE("Failed to set the frame number in the parameters");
11432 return BAD_VALUE;
11433 }
11434
11435 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11436 if (rc < 0) {
11437 LOGE("Failed to translate reproc request");
11438 return rc;
11439 }
11440
11441 CameraMetadata frame_settings;
11442 frame_settings = request->settings;
11443 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11444 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11445 int32_t *crop_count =
11446 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11447 int32_t *crop_data =
11448 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11449 int32_t *roi_map =
11450 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11451 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11452 cam_crop_data_t crop_meta;
11453 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11454 crop_meta.num_of_streams = 1;
11455 crop_meta.crop_info[0].crop.left = crop_data[0];
11456 crop_meta.crop_info[0].crop.top = crop_data[1];
11457 crop_meta.crop_info[0].crop.width = crop_data[2];
11458 crop_meta.crop_info[0].crop.height = crop_data[3];
11459
11460 crop_meta.crop_info[0].roi_map.left =
11461 roi_map[0];
11462 crop_meta.crop_info[0].roi_map.top =
11463 roi_map[1];
11464 crop_meta.crop_info[0].roi_map.width =
11465 roi_map[2];
11466 crop_meta.crop_info[0].roi_map.height =
11467 roi_map[3];
11468
11469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11470 rc = BAD_VALUE;
11471 }
11472 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11473 request->input_buffer->stream,
11474 crop_meta.crop_info[0].crop.left,
11475 crop_meta.crop_info[0].crop.top,
11476 crop_meta.crop_info[0].crop.width,
11477 crop_meta.crop_info[0].crop.height);
11478 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11479 request->input_buffer->stream,
11480 crop_meta.crop_info[0].roi_map.left,
11481 crop_meta.crop_info[0].roi_map.top,
11482 crop_meta.crop_info[0].roi_map.width,
11483 crop_meta.crop_info[0].roi_map.height);
11484 } else {
11485 LOGE("Invalid reprocess crop count %d!", *crop_count);
11486 }
11487 } else {
11488 LOGE("No crop data from matching output stream");
11489 }
11490
11491 /* These settings are not needed for regular requests, so handle them specially for
11492 reprocess requests; this information is needed for EXIF tags. */
11493 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11494 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11495 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11496 if (NAME_NOT_FOUND != val) {
11497 uint32_t flashMode = (uint32_t)val;
11498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11499 rc = BAD_VALUE;
11500 }
11501 } else {
11502 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11503 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11504 }
11505 } else {
11506 LOGH("No flash mode in reprocess settings");
11507 }
11508
11509 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11510 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11512 rc = BAD_VALUE;
11513 }
11514 } else {
11515 LOGH("No flash state in reprocess settings");
11516 }
11517
11518 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11519 uint8_t *reprocessFlags =
11520 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11522 *reprocessFlags)) {
11523 rc = BAD_VALUE;
11524 }
11525 }
11526
Thierry Strudel54dc9782017-02-15 12:12:10 -080011527 // Add exif debug data to internal metadata
11528 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11529 mm_jpeg_debug_exif_params_t *debug_params =
11530 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11531 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11532 // AE
11533 if (debug_params->ae_debug_params_valid == TRUE) {
11534 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11535 debug_params->ae_debug_params);
11536 }
11537 // AWB
11538 if (debug_params->awb_debug_params_valid == TRUE) {
11539 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11540 debug_params->awb_debug_params);
11541 }
11542 // AF
11543 if (debug_params->af_debug_params_valid == TRUE) {
11544 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11545 debug_params->af_debug_params);
11546 }
11547 // ASD
11548 if (debug_params->asd_debug_params_valid == TRUE) {
11549 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11550 debug_params->asd_debug_params);
11551 }
11552 // Stats
11553 if (debug_params->stats_debug_params_valid == TRUE) {
11554 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11555 debug_params->stats_debug_params);
11556 }
11557 // BE Stats
11558 if (debug_params->bestats_debug_params_valid == TRUE) {
11559 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11560 debug_params->bestats_debug_params);
11561 }
11562 // BHIST
11563 if (debug_params->bhist_debug_params_valid == TRUE) {
11564 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11565 debug_params->bhist_debug_params);
11566 }
11567 // 3A Tuning
11568 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11569 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11570 debug_params->q3a_tuning_debug_params);
11571 }
11572 }
11573
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011574 // Add metadata which reprocess needs
11575 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11576 cam_reprocess_info_t *repro_info =
11577 (cam_reprocess_info_t *)frame_settings.find
11578 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011579 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011580 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011581 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011582 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011583 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011584 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011585 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011586 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011587 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011588 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011589 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011590 repro_info->pipeline_flip);
11591 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11592 repro_info->af_roi);
11593 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11594 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011595 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11596 CAM_INTF_PARM_ROTATION metadata has already been added in
11597 translateToHalMetadata and HAL needs to keep this new rotation
11598 metadata. Otherwise, the old rotation info saved in the vendor tag
11599 is used. */
11600 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11601 CAM_INTF_PARM_ROTATION, reprocParam) {
11602 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11603 } else {
11604 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011605 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011606 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011607 }
11608
11609 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11610 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11611 roi.width and roi.height give the final JPEG size.
11612 For now, HAL only checks this for reprocess requests. */
11613 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11614 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11615 uint8_t *enable =
11616 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11617 if (*enable == TRUE) {
11618 int32_t *crop_data =
11619 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11620 cam_stream_crop_info_t crop_meta;
11621 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11622 crop_meta.stream_id = 0;
11623 crop_meta.crop.left = crop_data[0];
11624 crop_meta.crop.top = crop_data[1];
11625 crop_meta.crop.width = crop_data[2];
11626 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011627 // The JPEG crop roi should match cpp output size
11628 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11629 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11630 crop_meta.roi_map.left = 0;
11631 crop_meta.roi_map.top = 0;
11632 crop_meta.roi_map.width = cpp_crop->crop.width;
11633 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011634 }
11635 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11636 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011637 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011638 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011639 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11640 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011641 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011642 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11643
11644 // Add JPEG scale information
11645 cam_dimension_t scale_dim;
11646 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11647 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11648 int32_t *roi =
11649 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11650 scale_dim.width = roi[2];
11651 scale_dim.height = roi[3];
11652 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11653 scale_dim);
11654 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11655 scale_dim.width, scale_dim.height, mCameraId);
11656 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011657 }
11658 }
11659
11660 return rc;
11661}
11662
11663/*===========================================================================
11664 * FUNCTION : saveRequestSettings
11665 *
11666 * DESCRIPTION: Add any settings that might have changed to the request settings
11667 * and save the settings to be applied on the frame
11668 *
11669 * PARAMETERS :
11670 * @jpegMetadata : the extracted and/or modified jpeg metadata
11671 * @request : request with initial settings
11672 *
11673 * RETURN :
11674 * camera_metadata_t* : pointer to the saved request settings
11675 *==========================================================================*/
11676camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11677 const CameraMetadata &jpegMetadata,
11678 camera3_capture_request_t *request)
11679{
11680 camera_metadata_t *resultMetadata;
11681 CameraMetadata camMetadata;
11682 camMetadata = request->settings;
11683
11684 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11685 int32_t thumbnail_size[2];
11686 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11687 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11688 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11689 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11690 }
11691
11692 if (request->input_buffer != NULL) {
11693 uint8_t reprocessFlags = 1;
11694 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11695 (uint8_t*)&reprocessFlags,
11696 sizeof(reprocessFlags));
11697 }
11698
11699 resultMetadata = camMetadata.release();
11700 return resultMetadata;
11701}
11702
11703/*===========================================================================
11704 * FUNCTION : setHalFpsRange
11705 *
11706 * DESCRIPTION: set FPS range parameter
11707 *
11708 *
11709 * PARAMETERS :
11710 * @settings : Metadata from framework
11711 * @hal_metadata: Metadata buffer
11712 *
11713 *
11714 * RETURN : success: NO_ERROR
11715 *              failure: BAD_VALUE
11716 *==========================================================================*/
11717int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11718 metadata_buffer_t *hal_metadata)
11719{
11720 int32_t rc = NO_ERROR;
11721 cam_fps_range_t fps_range;
11722 fps_range.min_fps = (float)
11723 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11724 fps_range.max_fps = (float)
11725 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11726 fps_range.video_min_fps = fps_range.min_fps;
11727 fps_range.video_max_fps = fps_range.max_fps;
11728
11729 LOGD("aeTargetFpsRange fps: [%f %f]",
11730 fps_range.min_fps, fps_range.max_fps);
11731 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11732 * follows:
11733 * ---------------------------------------------------------------|
11734 * Video stream is absent in configure_streams |
11735      * (Camcorder preview before the first video record)            |
11736 * ---------------------------------------------------------------|
11737 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11738 * | | | vid_min/max_fps|
11739 * ---------------------------------------------------------------|
11740 * NO | [ 30, 240] | 240 | [240, 240] |
11741 * |-------------|-------------|----------------|
11742 * | [240, 240] | 240 | [240, 240] |
11743 * ---------------------------------------------------------------|
11744 * Video stream is present in configure_streams |
11745 * ---------------------------------------------------------------|
11746 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11747 * | | | vid_min/max_fps|
11748 * ---------------------------------------------------------------|
11749 * NO | [ 30, 240] | 240 | [240, 240] |
11750 * (camcorder prev |-------------|-------------|----------------|
11751 * after video rec | [240, 240] | 240 | [240, 240] |
11752 * is stopped) | | | |
11753 * ---------------------------------------------------------------|
11754 * YES | [ 30, 240] | 240 | [240, 240] |
11755 * |-------------|-------------|----------------|
11756 * | [240, 240] | 240 | [240, 240] |
11757 * ---------------------------------------------------------------|
11758 * When Video stream is absent in configure_streams,
11759 * preview fps = sensor_fps / batchsize
11760 * Eg: for 240fps at batchSize 4, preview = 60fps
11761 * for 120fps at batchSize 4, preview = 30fps
11762 *
11763 * When video stream is present in configure_streams, preview fps is as per
11764 * the ratio of preview buffers to video buffers requested in process
11765 * capture request
11766 */
11767 mBatchSize = 0;
11768 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11769 fps_range.min_fps = fps_range.video_max_fps;
11770 fps_range.video_min_fps = fps_range.video_max_fps;
11771 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11772 fps_range.max_fps);
11773 if (NAME_NOT_FOUND != val) {
11774 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11776 return BAD_VALUE;
11777 }
11778
11779 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11780 /* If batchmode is currently in progress and the fps changes,
11781 * set the flag to restart the sensor */
11782 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11783 (mHFRVideoFps != fps_range.max_fps)) {
11784 mNeedSensorRestart = true;
11785 }
11786 mHFRVideoFps = fps_range.max_fps;
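                // Derive the batch size so that one preview frame is produced per
                // batch (preview fps = HFR fps / batch size, per the table above,
                // e.g. 240 fps HFR with a batch of 4 gives a 60 fps preview), then
                // clamp it to MAX_HFR_BATCH_SIZE.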
11787 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11788 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11789 mBatchSize = MAX_HFR_BATCH_SIZE;
11790 }
11791 }
11792 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11793
11794 }
11795 } else {
11796         /* HFR mode is a session parameter in the backend/ISP. This should be reset when
11797 * in non-HFR mode */
11798 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11800 return BAD_VALUE;
11801 }
11802 }
11803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11804 return BAD_VALUE;
11805 }
11806 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11807 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11808 return rc;
11809}
11810
11811/*===========================================================================
11812 * FUNCTION : translateToHalMetadata
11813 *
11814 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11815 *
11816 *
11817 * PARAMETERS :
11818 * @request : request sent from framework
11819 *
11820 *
11821 * RETURN : success: NO_ERROR
11822 *              failure: BAD_VALUE
11823 *==========================================================================*/
11824int QCamera3HardwareInterface::translateToHalMetadata
11825 (const camera3_capture_request_t *request,
11826 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011827 uint32_t snapshotStreamId) {
11828 if (request == nullptr || hal_metadata == nullptr) {
11829 return BAD_VALUE;
11830 }
11831
11832 int64_t minFrameDuration = getMinFrameDuration(request);
11833
11834 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11835 minFrameDuration);
11836}
11837
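/*===========================================================================
 * FUNCTION : translateFwkMetadataToHalMetadata
 *
 * DESCRIPTION: Translate framework capture settings (camera_metadata_t) into
 *              entries of the HAL metadata buffer
 *
 * PARAMETERS :
 *   @frameworkMetadata : settings from the framework request
 *   @hal_metadata      : destination HAL metadata buffer
 *   @snapshotStreamId  : stream id attached to the JPEG rotation info
 *   @minFrameDuration  : lower bound used to clamp the requested frame duration
 *
 * RETURN : success: NO_ERROR
 *          failure: BAD_VALUE
 *==========================================================================*/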
11838int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11839 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11840 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11841
Thierry Strudel3d639192016-09-09 11:52:26 -070011842 int rc = 0;
11843 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011844 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011845
11846 /* Do not change the order of the following list unless you know what you are
11847 * doing.
11848 * The order is laid out in such a way that parameters in the front of the table
11849 * may be used to override the parameters later in the table. Examples are:
11850 * 1. META_MODE should precede AEC/AWB/AF MODE
11851      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11852 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11853      * 4. Any mode should precede its corresponding settings
11854 */
11855 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11856 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11857 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11858 rc = BAD_VALUE;
11859 }
11860 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11861 if (rc != NO_ERROR) {
11862 LOGE("extractSceneMode failed");
11863 }
11864 }
11865
11866 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11867 uint8_t fwk_aeMode =
11868 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11869 uint8_t aeMode;
11870 int32_t redeye;
11871
11872 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11873 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011874 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11875 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011876 } else {
11877 aeMode = CAM_AE_MODE_ON;
11878 }
11879 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11880 redeye = 1;
11881 } else {
11882 redeye = 0;
11883 }
11884
11885 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11886 fwk_aeMode);
11887 if (NAME_NOT_FOUND != val) {
11888 int32_t flashMode = (int32_t)val;
11889 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11890 }
11891
11892 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11893 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11894 rc = BAD_VALUE;
11895 }
11896 }
11897
11898 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11899 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11900 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11901 fwk_whiteLevel);
11902 if (NAME_NOT_FOUND != val) {
11903 uint8_t whiteLevel = (uint8_t)val;
11904 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11905 rc = BAD_VALUE;
11906 }
11907 }
11908 }
11909
11910 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11911 uint8_t fwk_cacMode =
11912 frame_settings.find(
11913 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11914 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11915 fwk_cacMode);
11916 if (NAME_NOT_FOUND != val) {
11917 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11918 bool entryAvailable = FALSE;
11919 // Check whether Frameworks set CAC mode is supported in device or not
11920 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11921 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11922 entryAvailable = TRUE;
11923 break;
11924 }
11925 }
11926 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11927 // If entry not found then set the device supported mode instead of frameworks mode i.e,
11928             // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.:
11929             // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast by ISP
11930             // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
11931 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11932 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11933 } else {
11934 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11935                     // High is not supported, so set FAST, as the spec says the underlying
11936 // device implementation can be the same for both modes.
11937 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11938 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11939                     // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
11940 // in order to avoid the fps drop due to high quality
11941 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11942 } else {
11943 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11944 }
11945 }
11946 }
11947 LOGD("Final cacMode is %d", cacMode);
11948 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11949 rc = BAD_VALUE;
11950 }
11951 } else {
11952 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11953 }
11954 }
11955
Thierry Strudel2896d122017-02-23 19:18:03 -080011956 char af_value[PROPERTY_VALUE_MAX];
11957 property_get("persist.camera.af.infinity", af_value, "0");
11958
Jason Lee84ae9972017-02-24 13:24:24 -080011959 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011960 if (atoi(af_value) == 0) {
11961 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011962 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011963 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11964 fwk_focusMode);
11965 if (NAME_NOT_FOUND != val) {
11966 uint8_t focusMode = (uint8_t)val;
11967 LOGD("set focus mode %d", focusMode);
11968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11969 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11970 rc = BAD_VALUE;
11971 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011972 }
11973 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011974 } else {
11975 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11976 LOGE("Focus forced to infinity %d", focusMode);
11977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11978 rc = BAD_VALUE;
11979 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011980 }
11981
Jason Lee84ae9972017-02-24 13:24:24 -080011982 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11983 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011984 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11986 focalDistance)) {
11987 rc = BAD_VALUE;
11988 }
11989 }
11990
11991 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11992 uint8_t fwk_antibandingMode =
11993 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11994 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11995 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11996 if (NAME_NOT_FOUND != val) {
11997 uint32_t hal_antibandingMode = (uint32_t)val;
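            // Resolve AUTO antibanding to a concrete power-line frequency based
            // on whether the device is operating in a 60Hz zone.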
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011998 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11999 if (m60HzZone) {
12000 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12001 } else {
12002 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12003 }
12004 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012005 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12006 hal_antibandingMode)) {
12007 rc = BAD_VALUE;
12008 }
12009 }
12010 }
12011
12012 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12013 int32_t expCompensation = frame_settings.find(
12014 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12015 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12016 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12017 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12018 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012019 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012020 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12021 expCompensation)) {
12022 rc = BAD_VALUE;
12023 }
12024 }
12025
12026 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12027 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12028 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12029 rc = BAD_VALUE;
12030 }
12031 }
12032 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12033 rc = setHalFpsRange(frame_settings, hal_metadata);
12034 if (rc != NO_ERROR) {
12035 LOGE("setHalFpsRange failed");
12036 }
12037 }
12038
12039 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12040 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12042 rc = BAD_VALUE;
12043 }
12044 }
12045
12046 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12047 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12048 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12049 fwk_effectMode);
12050 if (NAME_NOT_FOUND != val) {
12051 uint8_t effectMode = (uint8_t)val;
12052 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12053 rc = BAD_VALUE;
12054 }
12055 }
12056 }
12057
12058 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12059 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12060 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12061 colorCorrectMode)) {
12062 rc = BAD_VALUE;
12063 }
12064 }
12065
12066 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12067 cam_color_correct_gains_t colorCorrectGains;
12068 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12069 colorCorrectGains.gains[i] =
12070 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12071 }
12072 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12073 colorCorrectGains)) {
12074 rc = BAD_VALUE;
12075 }
12076 }
12077
12078 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12079 cam_color_correct_matrix_t colorCorrectTransform;
12080 cam_rational_type_t transform_elem;
12081 size_t num = 0;
12082 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12083 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12084 transform_elem.numerator =
12085 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12086 transform_elem.denominator =
12087 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12088 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12089 num++;
12090 }
12091 }
12092 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12093 colorCorrectTransform)) {
12094 rc = BAD_VALUE;
12095 }
12096 }
12097
12098 cam_trigger_t aecTrigger;
12099 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12100 aecTrigger.trigger_id = -1;
12101 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12102 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12103 aecTrigger.trigger =
12104 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12105 aecTrigger.trigger_id =
12106 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12107 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12108 aecTrigger)) {
12109 rc = BAD_VALUE;
12110 }
12111 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12112 aecTrigger.trigger, aecTrigger.trigger_id);
12113 }
12114
12115 /*af_trigger must come with a trigger id*/
12116 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12117 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12118 cam_trigger_t af_trigger;
12119 af_trigger.trigger =
12120 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12121 af_trigger.trigger_id =
12122 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12124 rc = BAD_VALUE;
12125 }
12126 LOGD("AfTrigger: %d AfTriggerID: %d",
12127 af_trigger.trigger, af_trigger.trigger_id);
12128 }
12129
12130 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12131 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12132 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12133 rc = BAD_VALUE;
12134 }
12135 }
12136 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12137 cam_edge_application_t edge_application;
12138 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012139
Thierry Strudel3d639192016-09-09 11:52:26 -070012140 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12141 edge_application.sharpness = 0;
12142 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012143 edge_application.sharpness =
12144 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12145 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12146 int32_t sharpness =
12147 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12148 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12149 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12150 LOGD("Setting edge mode sharpness %d", sharpness);
12151 edge_application.sharpness = sharpness;
12152 }
12153 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012154 }
12155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12156 rc = BAD_VALUE;
12157 }
12158 }
12159
12160 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12161 int32_t respectFlashMode = 1;
12162 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12163 uint8_t fwk_aeMode =
12164 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012165 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12166 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12167 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012168 respectFlashMode = 0;
12169 LOGH("AE Mode controls flash, ignore android.flash.mode");
12170 }
12171 }
12172 if (respectFlashMode) {
12173 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12174 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12175 LOGH("flash mode after mapping %d", val);
12176 // To check: CAM_INTF_META_FLASH_MODE usage
12177 if (NAME_NOT_FOUND != val) {
12178 uint8_t flashMode = (uint8_t)val;
12179 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12180 rc = BAD_VALUE;
12181 }
12182 }
12183 }
12184 }
12185
12186 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12187 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12189 rc = BAD_VALUE;
12190 }
12191 }
12192
12193 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12194 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12195 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12196 flashFiringTime)) {
12197 rc = BAD_VALUE;
12198 }
12199 }
12200
12201 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12202 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12204 hotPixelMode)) {
12205 rc = BAD_VALUE;
12206 }
12207 }
12208
12209 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12210 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12212 lensAperture)) {
12213 rc = BAD_VALUE;
12214 }
12215 }
12216
12217 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12218 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12220 filterDensity)) {
12221 rc = BAD_VALUE;
12222 }
12223 }
12224
12225 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12226 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12227 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12228 focalLength)) {
12229 rc = BAD_VALUE;
12230 }
12231 }
12232
12233 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12234 uint8_t optStabMode =
12235 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12237 optStabMode)) {
12238 rc = BAD_VALUE;
12239 }
12240 }
12241
12242 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12243 uint8_t videoStabMode =
12244 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12245 LOGD("videoStabMode from APP = %d", videoStabMode);
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12247 videoStabMode)) {
12248 rc = BAD_VALUE;
12249 }
12250 }
12251
12252
12253 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12254 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12255 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12256 noiseRedMode)) {
12257 rc = BAD_VALUE;
12258 }
12259 }
12260
12261 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12262 float reprocessEffectiveExposureFactor =
12263 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12265 reprocessEffectiveExposureFactor)) {
12266 rc = BAD_VALUE;
12267 }
12268 }
12269
12270 cam_crop_region_t scalerCropRegion;
12271 bool scalerCropSet = false;
12272 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12273 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12274 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12275 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12276 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12277
12278 // Map coordinate system from active array to sensor output.
12279 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12280 scalerCropRegion.width, scalerCropRegion.height);
12281
12282 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12283 scalerCropRegion)) {
12284 rc = BAD_VALUE;
12285 }
12286 scalerCropSet = true;
12287 }
12288
12289 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12290 int64_t sensorExpTime =
12291 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12292 LOGD("setting sensorExpTime %lld", sensorExpTime);
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12294 sensorExpTime)) {
12295 rc = BAD_VALUE;
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12300 int64_t sensorFrameDuration =
12301 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012302 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12303 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12304 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12305 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12306 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12307 sensorFrameDuration)) {
12308 rc = BAD_VALUE;
12309 }
12310 }
12311
12312 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12313 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12314 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12315 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12316 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12317 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12318 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12319 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12320 sensorSensitivity)) {
12321 rc = BAD_VALUE;
12322 }
12323 }
12324
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012325#ifndef USE_HAL_3_3
12326 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12327 int32_t ispSensitivity =
12328 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12329 if (ispSensitivity <
12330 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12331 ispSensitivity =
12332 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12333 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12334 }
12335 if (ispSensitivity >
12336 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12337 ispSensitivity =
12338 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12339 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12340 }
12341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12342 ispSensitivity)) {
12343 rc = BAD_VALUE;
12344 }
12345 }
12346#endif
12347
Thierry Strudel3d639192016-09-09 11:52:26 -070012348 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12349 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12350 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12351 rc = BAD_VALUE;
12352 }
12353 }
12354
12355 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12356 uint8_t fwk_facedetectMode =
12357 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12358
12359 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12360 fwk_facedetectMode);
12361
12362 if (NAME_NOT_FOUND != val) {
12363 uint8_t facedetectMode = (uint8_t)val;
12364 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12365 facedetectMode)) {
12366 rc = BAD_VALUE;
12367 }
12368 }
12369 }
12370
Thierry Strudel54dc9782017-02-15 12:12:10 -080012371 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012372 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012373 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12375 histogramMode)) {
12376 rc = BAD_VALUE;
12377 }
12378 }
12379
12380 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12381 uint8_t sharpnessMapMode =
12382 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12383 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12384 sharpnessMapMode)) {
12385 rc = BAD_VALUE;
12386 }
12387 }
12388
12389 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12390 uint8_t tonemapMode =
12391 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12392 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12393 rc = BAD_VALUE;
12394 }
12395 }
12396 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12397 /*All tonemap channels will have the same number of points*/
12398 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12399 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12400 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12401 cam_rgb_tonemap_curves tonemapCurves;
12402 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12403 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12404 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12405 tonemapCurves.tonemap_points_cnt,
12406 CAM_MAX_TONEMAP_CURVE_SIZE);
12407 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12408 }
12409
12410 /* ch0 = G*/
12411 size_t point = 0;
12412 cam_tonemap_curve_t tonemapCurveGreen;
12413 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12414 for (size_t j = 0; j < 2; j++) {
12415 tonemapCurveGreen.tonemap_points[i][j] =
12416 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12417 point++;
12418 }
12419 }
12420 tonemapCurves.curves[0] = tonemapCurveGreen;
12421
12422 /* ch 1 = B */
12423 point = 0;
12424 cam_tonemap_curve_t tonemapCurveBlue;
12425 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12426 for (size_t j = 0; j < 2; j++) {
12427 tonemapCurveBlue.tonemap_points[i][j] =
12428 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12429 point++;
12430 }
12431 }
12432 tonemapCurves.curves[1] = tonemapCurveBlue;
12433
12434 /* ch 2 = R */
12435 point = 0;
12436 cam_tonemap_curve_t tonemapCurveRed;
12437 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12438 for (size_t j = 0; j < 2; j++) {
12439 tonemapCurveRed.tonemap_points[i][j] =
12440 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12441 point++;
12442 }
12443 }
12444 tonemapCurves.curves[2] = tonemapCurveRed;
12445
12446 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12447 tonemapCurves)) {
12448 rc = BAD_VALUE;
12449 }
12450 }
12451
12452 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12453 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12454 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12455 captureIntent)) {
12456 rc = BAD_VALUE;
12457 }
12458 }
12459
12460 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12461 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12462 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12463 blackLevelLock)) {
12464 rc = BAD_VALUE;
12465 }
12466 }
12467
12468 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12469 uint8_t lensShadingMapMode =
12470 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12471 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12472 lensShadingMapMode)) {
12473 rc = BAD_VALUE;
12474 }
12475 }
12476
12477 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12478 cam_area_t roi;
12479 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012480 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012481
12482 // Map coordinate system from active array to sensor output.
12483 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12484 roi.rect.height);
12485
12486 if (scalerCropSet) {
12487 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12488 }
12489 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12490 rc = BAD_VALUE;
12491 }
12492 }
12493
12494 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12495 cam_area_t roi;
12496 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012497 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012498
12499 // Map coordinate system from active array to sensor output.
12500 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12501 roi.rect.height);
12502
12503 if (scalerCropSet) {
12504 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12505 }
12506 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12507 rc = BAD_VALUE;
12508 }
12509 }
12510
12511 // CDS for non-HFR non-video mode
12512 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12513 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12514 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12515 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12516 LOGE("Invalid CDS mode %d!", *fwk_cds);
12517 } else {
12518 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12519 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12520 rc = BAD_VALUE;
12521 }
12522 }
12523 }
12524
Thierry Strudel04e026f2016-10-10 11:27:36 -070012525 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012526 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012527 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012528 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12529 }
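    // m_bVideoHdrEnabled, when set, forces video HDR on regardless of the
    // per-request value.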
12530 if (m_bVideoHdrEnabled)
12531 vhdr = CAM_VIDEO_HDR_MODE_ON;
12532
Thierry Strudel54dc9782017-02-15 12:12:10 -080012533 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12534
12535 if(vhdr != curr_hdr_state)
12536 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12537
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012538 rc = setVideoHdrMode(mParameters, vhdr);
12539 if (rc != NO_ERROR) {
12540         LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012541 }
12542
12543 //IR
12544 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12545 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12546 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012547 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12548 uint8_t isIRon = 0;
12549
12550         isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012551 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12552 LOGE("Invalid IR mode %d!", fwk_ir);
12553 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012554 if(isIRon != curr_ir_state )
12555 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12556
Thierry Strudel04e026f2016-10-10 11:27:36 -070012557 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12558 CAM_INTF_META_IR_MODE, fwk_ir)) {
12559 rc = BAD_VALUE;
12560 }
12561 }
12562 }
12563
Thierry Strudel54dc9782017-02-15 12:12:10 -080012564 //Binning Correction Mode
12565 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12566 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12567 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12568 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12569 || (0 > fwk_binning_correction)) {
12570 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12571 } else {
12572 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12573 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577 }
12578
Thierry Strudel269c81a2016-10-12 12:13:59 -070012579 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12580 float aec_speed;
12581 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12582 LOGD("AEC Speed :%f", aec_speed);
12583 if ( aec_speed < 0 ) {
12584             LOGE("Invalid AEC convergence speed %f!", aec_speed);
12585 } else {
12586 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12587 aec_speed)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591 }
12592
12593 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12594 float awb_speed;
12595 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12596 LOGD("AWB Speed :%f", awb_speed);
12597 if ( awb_speed < 0 ) {
12598 LOGE("Invalid AWB mode %f!", awb_speed);
12599             LOGE("Invalid AWB convergence speed %f!", awb_speed);
12600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12601 awb_speed)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605 }
12606
Thierry Strudel3d639192016-09-09 11:52:26 -070012607 // TNR
12608 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12609 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12610 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012611 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012612 cam_denoise_param_t tnr;
12613 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12614 tnr.process_plates =
12615 (cam_denoise_process_type_t)frame_settings.find(
12616 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12617 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012618
12619 if(b_TnrRequested != curr_tnr_state)
12620 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12621
Thierry Strudel3d639192016-09-09 11:52:26 -070012622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12623 rc = BAD_VALUE;
12624 }
12625 }
12626
Thierry Strudel54dc9782017-02-15 12:12:10 -080012627 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012628 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012629 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012630 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12631 *exposure_metering_mode)) {
12632 rc = BAD_VALUE;
12633 }
12634 }
12635
Thierry Strudel3d639192016-09-09 11:52:26 -070012636 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12637 int32_t fwk_testPatternMode =
12638 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12639 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12640 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12641
12642 if (NAME_NOT_FOUND != testPatternMode) {
12643 cam_test_pattern_data_t testPatternData;
12644 memset(&testPatternData, 0, sizeof(testPatternData));
12645 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12646 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12647 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12648 int32_t *fwk_testPatternData =
12649 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12650 testPatternData.r = fwk_testPatternData[0];
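                // R and B are fixed at indices 0 and 3 of the framework data; map
                // the two green samples onto Gr/Gb according to the sensor's color
                // filter arrangement.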
12651 testPatternData.b = fwk_testPatternData[3];
12652 switch (gCamCapability[mCameraId]->color_arrangement) {
12653 case CAM_FILTER_ARRANGEMENT_RGGB:
12654 case CAM_FILTER_ARRANGEMENT_GRBG:
12655 testPatternData.gr = fwk_testPatternData[1];
12656 testPatternData.gb = fwk_testPatternData[2];
12657 break;
12658 case CAM_FILTER_ARRANGEMENT_GBRG:
12659 case CAM_FILTER_ARRANGEMENT_BGGR:
12660 testPatternData.gr = fwk_testPatternData[2];
12661 testPatternData.gb = fwk_testPatternData[1];
12662 break;
12663 default:
12664 LOGE("color arrangement %d is not supported",
12665 gCamCapability[mCameraId]->color_arrangement);
12666 break;
12667 }
12668 }
12669 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12670 testPatternData)) {
12671 rc = BAD_VALUE;
12672 }
12673 } else {
12674 LOGE("Invalid framework sensor test pattern mode %d",
12675 fwk_testPatternMode);
12676 }
12677 }
12678
12679 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12680 size_t count = 0;
12681 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12682 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12683 gps_coords.data.d, gps_coords.count, count);
12684 if (gps_coords.count != count) {
12685 rc = BAD_VALUE;
12686 }
12687 }
12688
12689 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12690 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12691 size_t count = 0;
12692 const char *gps_methods_src = (const char *)
12693 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12694 memset(gps_methods, '\0', sizeof(gps_methods));
12695 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12696 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12697 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12698 if (GPS_PROCESSING_METHOD_SIZE != count) {
12699 rc = BAD_VALUE;
12700 }
12701 }
12702
12703 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12704 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12705 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12706 gps_timestamp)) {
12707 rc = BAD_VALUE;
12708 }
12709 }
12710
12711 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12712 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12713 cam_rotation_info_t rotation_info;
12714 if (orientation == 0) {
12715 rotation_info.rotation = ROTATE_0;
12716 } else if (orientation == 90) {
12717 rotation_info.rotation = ROTATE_90;
12718 } else if (orientation == 180) {
12719 rotation_info.rotation = ROTATE_180;
12720 } else if (orientation == 270) {
12721 rotation_info.rotation = ROTATE_270;
12722 }
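        // The rotation is tagged with the snapshot stream id, presumably so the
        // backend applies it to the snapshot (JPEG) output only.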
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012723 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012724 rotation_info.streamId = snapshotStreamId;
12725 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12727 rc = BAD_VALUE;
12728 }
12729 }
12730
12731 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12732 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
12737
12738 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12739 uint32_t thumb_quality = (uint32_t)
12740 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12741 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12742 thumb_quality)) {
12743 rc = BAD_VALUE;
12744 }
12745 }
12746
12747 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12748 cam_dimension_t dim;
12749 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12750 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12752 rc = BAD_VALUE;
12753 }
12754 }
12755
12756 // Internal metadata
12757 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12758 size_t count = 0;
12759 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12760 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12761 privatedata.data.i32, privatedata.count, count);
12762 if (privatedata.count != count) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012767 // ISO/Exposure Priority
12768 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12769 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12770 cam_priority_mode_t mode =
12771 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12772 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12773 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12774 use_iso_exp_pty.previewOnly = FALSE;
12775 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12776 use_iso_exp_pty.value = *ptr;
12777
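            // Route the manual value to ISO or exposure-time priority and enable
            // ZSL while a priority mode is in use; ZSL is disabled below when no
            // priority mode is requested.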
12778 if(CAM_ISO_PRIORITY == mode) {
12779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12780 use_iso_exp_pty)) {
12781 rc = BAD_VALUE;
12782 }
12783 }
12784 else {
12785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12786 use_iso_exp_pty)) {
12787 rc = BAD_VALUE;
12788 }
12789 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012790
12791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12792 rc = BAD_VALUE;
12793 }
12794 }
12795 } else {
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12797 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012798 }
12799 }
12800
12801 // Saturation
12802 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12803 int32_t* use_saturation =
12804 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12806 rc = BAD_VALUE;
12807 }
12808 }
12809
Thierry Strudel3d639192016-09-09 11:52:26 -070012810 // EV step
12811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12812 gCamCapability[mCameraId]->exp_compensation_step)) {
12813 rc = BAD_VALUE;
12814 }
12815
12816 // CDS info
12817 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12818 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12819 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12820
12821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12822 CAM_INTF_META_CDS_DATA, *cdsData)) {
12823 rc = BAD_VALUE;
12824 }
12825 }
12826
Shuzhen Wang19463d72016-03-08 11:09:52 -080012827 // Hybrid AE
12828 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12829 uint8_t *hybrid_ae = (uint8_t *)
12830 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12831
12832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12833 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12834 rc = BAD_VALUE;
12835 }
12836 }
12837
Shuzhen Wang14415f52016-11-16 18:26:18 -080012838 // Histogram
12839 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12840 uint8_t histogramMode =
12841 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12843 histogramMode)) {
12844 rc = BAD_VALUE;
12845 }
12846 }
12847
12848 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12849 int32_t histogramBins =
12850 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12852 histogramBins)) {
12853 rc = BAD_VALUE;
12854 }
12855 }
12856
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012857 // Tracking AF
12858 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12859 uint8_t trackingAfTrigger =
12860 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12861 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12862 trackingAfTrigger)) {
12863 rc = BAD_VALUE;
12864 }
12865 }
12866
Thierry Strudel3d639192016-09-09 11:52:26 -070012867 return rc;
12868}
12869
12870/*===========================================================================
12871 * FUNCTION : captureResultCb
12872 *
12873 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12874 *
12875 * PARAMETERS :
12876 * @frame : frame information from mm-camera-interface
12877 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12878 * @userdata: userdata
12879 *
12880 * RETURN : NONE
12881 *==========================================================================*/
12882void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12883 camera3_stream_buffer_t *buffer,
12884 uint32_t frame_number, bool isInputBuffer, void *userdata)
12885{
12886 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12887 if (hw == NULL) {
12888 LOGE("Invalid hw %p", hw);
12889 return;
12890 }
12891
12892 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12893 return;
12894}
12895
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012896/*===========================================================================
12897 * FUNCTION : setBufferErrorStatus
12898 *
12899 * DESCRIPTION: Callback handler for channels to report any buffer errors
12900 *
12901 * PARAMETERS :
12902 * @ch : Channel on which buffer error is reported from
12903 * @frame_number : frame number on which buffer error is reported on
12904 * @buffer_status : buffer error status
12905 * @userdata: userdata
12906 *
12907 * RETURN : NONE
12908 *==========================================================================*/
12909void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12910 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12911{
12912 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12913 if (hw == NULL) {
12914 LOGE("Invalid hw %p", hw);
12915 return;
12916 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012917
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012918 hw->setBufferErrorStatus(ch, frame_number, err);
12919 return;
12920}
12921
12922void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12923 uint32_t frameNumber, camera3_buffer_status_t err)
12924{
12925 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12926 pthread_mutex_lock(&mMutex);
12927
12928 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12929 if (req.frame_number != frameNumber)
12930 continue;
12931 for (auto& k : req.mPendingBufferList) {
12932 if(k.stream->priv == ch) {
12933 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12934 }
12935 }
12936 }
12937
12938 pthread_mutex_unlock(&mMutex);
12939 return;
12940}
Thierry Strudel3d639192016-09-09 11:52:26 -070012941/*===========================================================================
12942 * FUNCTION : initialize
12943 *
12944 * DESCRIPTION: Pass framework callback pointers to HAL
12945 *
12946 * PARAMETERS : @device       : camera3 device handle
12947 *              @callback_ops : framework callback functions
12948 *
12949 * RETURN : Success : 0
12950 * Failure: -ENODEV
12951 *==========================================================================*/
12952
12953int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12954 const camera3_callback_ops_t *callback_ops)
12955{
12956 LOGD("E");
12957 QCamera3HardwareInterface *hw =
12958 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12959 if (!hw) {
12960 LOGE("NULL camera device");
12961 return -ENODEV;
12962 }
12963
12964 int rc = hw->initialize(callback_ops);
12965 LOGD("X");
12966 return rc;
12967}
12968
12969/*===========================================================================
12970 * FUNCTION : configure_streams
12971 *
12972 * DESCRIPTION: Forward the framework's stream configuration to the HAL
12973 *              instance stored in device->priv
12974 * PARAMETERS :
12975 *   @device      : camera3 device handle
12976 *   @stream_list : stream configuration requested by the framework
12977 * RETURN : Success: 0
12978 * Failure: -EINVAL (if stream configuration is invalid)
12979 * -ENODEV (fatal error)
12980 *==========================================================================*/
12981
12982int QCamera3HardwareInterface::configure_streams(
12983 const struct camera3_device *device,
12984 camera3_stream_configuration_t *stream_list)
12985{
12986 LOGD("E");
12987 QCamera3HardwareInterface *hw =
12988 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12989 if (!hw) {
12990 LOGE("NULL camera device");
12991 return -ENODEV;
12992 }
12993 int rc = hw->configureStreams(stream_list);
12994 LOGD("X");
12995 return rc;
12996}
12997
12998/*===========================================================================
12999 * FUNCTION : construct_default_request_settings
13000 *
13001 * DESCRIPTION: Configure a settings buffer to meet the required use case
13002 *
13003 * PARAMETERS :
13004 *   @device : camera3 device handle
13005 *   @type   : capture intent template type (e.g. preview, still capture)
13006 * RETURN : Success: Return valid metadata
13007 * Failure: Return NULL
13008 *==========================================================================*/
13009const camera_metadata_t* QCamera3HardwareInterface::
13010 construct_default_request_settings(const struct camera3_device *device,
13011 int type)
13012{
13013
13014 LOGD("E");
13015 camera_metadata_t* fwk_metadata = NULL;
13016 QCamera3HardwareInterface *hw =
13017 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13018 if (!hw) {
13019 LOGE("NULL camera device");
13020 return NULL;
13021 }
13022
13023 fwk_metadata = hw->translateCapabilityToMetadata(type);
13024
13025 LOGD("X");
13026 return fwk_metadata;
13027}
13028
13029/*===========================================================================
13030 * FUNCTION : process_capture_request
13031 *
13032 * DESCRIPTION: Forward a capture request from the framework to the HAL
13033 *              instance for orchestration and processing
13034 * PARAMETERS :
13035 *   @device  : camera3 device handle
13036 *   @request : capture request to process
13037 * RETURN : -EINVAL on NULL device, otherwise the result of orchestrateRequest
13038 *==========================================================================*/
13039int QCamera3HardwareInterface::process_capture_request(
13040 const struct camera3_device *device,
13041 camera3_capture_request_t *request)
13042{
13043 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013044 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013045 QCamera3HardwareInterface *hw =
13046 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13047 if (!hw) {
13048 LOGE("NULL camera device");
13049 return -EINVAL;
13050 }
13051
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013052 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013053 LOGD("X");
13054 return rc;
13055}
13056
13057/*===========================================================================
13058 * FUNCTION : dump
13059 *
13060 * DESCRIPTION: Dump HAL state for the camera device to the given file
13061 *              descriptor (triggered by "adb shell dumpsys media.camera")
13062 * PARAMETERS :
13063 *   @device : camera3 device handle
13064 *   @fd     : file descriptor to write the dump to
13065 * RETURN : NONE
13066 *==========================================================================*/
13067
13068void QCamera3HardwareInterface::dump(
13069 const struct camera3_device *device, int fd)
13070{
13071 /* Log level property is read when "adb shell dumpsys media.camera" is
13072 called so that the log level can be controlled without restarting
13073 the media server */
13074 getLogLevel();
13075
13076 LOGD("E");
13077 QCamera3HardwareInterface *hw =
13078 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13079 if (!hw) {
13080 LOGE("NULL camera device");
13081 return;
13082 }
13083
13084 hw->dump(fd);
13085 LOGD("X");
13086 return;
13087}
13088
13089/*===========================================================================
13090 * FUNCTION : flush
13091 *
13092 * DESCRIPTION: Flush in-flight captures for the camera device, restarting
13093 *              channels when the device is in the STARTED state
13094 * PARAMETERS :
13095 *   @device : camera3 device handle
13096 * RETURN : 0 on success or when no flush is needed
13097 *          -EINVAL on NULL device, -ENODEV on fatal device error
13098 *==========================================================================*/
13099
13100int QCamera3HardwareInterface::flush(
13101 const struct camera3_device *device)
13102{
13103 int rc;
13104 LOGD("E");
13105 QCamera3HardwareInterface *hw =
13106 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13107 if (!hw) {
13108 LOGE("NULL camera device");
13109 return -EINVAL;
13110 }
13111
13112 pthread_mutex_lock(&hw->mMutex);
13113 // Validate current state
13114 switch (hw->mState) {
13115 case STARTED:
13116 /* valid state */
13117 break;
13118
13119 case ERROR:
13120 pthread_mutex_unlock(&hw->mMutex);
13121 hw->handleCameraDeviceError();
13122 return -ENODEV;
13123
13124 default:
13125 LOGI("Flush returned during state %d", hw->mState);
13126 pthread_mutex_unlock(&hw->mMutex);
13127 return 0;
13128 }
13129 pthread_mutex_unlock(&hw->mMutex);
13130
13131 rc = hw->flush(true /* restart channels */ );
13132 LOGD("X");
13133 return rc;
13134}
13135
13136/*===========================================================================
13137 * FUNCTION : close_camera_device
13138 *
13139 * DESCRIPTION: Close the camera device and release the HAL instance
13140 *
13141 * PARAMETERS :
13142 * @device : hw_device_t handle of the camera device to close
13143 *
13144 * RETURN : NO_ERROR on success, BAD_VALUE if the device handle is invalid
13145 *==========================================================================*/
13146int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13147{
13148 int ret = NO_ERROR;
13149 QCamera3HardwareInterface *hw =
13150 reinterpret_cast<QCamera3HardwareInterface *>(
13151 reinterpret_cast<camera3_device_t *>(device)->priv);
13152 if (!hw) {
13153 LOGE("NULL camera device");
13154 return BAD_VALUE;
13155 }
13156
13157 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13158 delete hw;
13159 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013160 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013161 return ret;
13162}
13163
13164/*===========================================================================
13165 * FUNCTION : getWaveletDenoiseProcessPlate
13166 *
13167 * DESCRIPTION: query wavelet denoise process plate
13168 *
13169 * PARAMETERS : None
13170 *
13171 * RETURN : WNR process plate value
13172 *==========================================================================*/
13173cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13174{
13175 char prop[PROPERTY_VALUE_MAX];
13176 memset(prop, 0, sizeof(prop));
13177 property_get("persist.denoise.process.plates", prop, "0");
13178 int processPlate = atoi(prop);
13179 switch(processPlate) {
13180 case 0:
13181 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13182 case 1:
13183 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13184 case 2:
13185 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13186 case 3:
13187 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13188 default:
13189 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13190 }
13191}
13192
13193
13194/*===========================================================================
13195 * FUNCTION : getTemporalDenoiseProcessPlate
13196 *
13197 * DESCRIPTION: query temporal denoise process plate
13198 *
13199 * PARAMETERS : None
13200 *
13201 * RETURN : TNR process plate value
13202 *==========================================================================*/
13203cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13204{
13205 char prop[PROPERTY_VALUE_MAX];
13206 memset(prop, 0, sizeof(prop));
13207 property_get("persist.tnr.process.plates", prop, "0");
13208 int processPlate = atoi(prop);
13209 switch(processPlate) {
13210 case 0:
13211 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13212 case 1:
13213 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13214 case 2:
13215 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13216 case 3:
13217 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13218 default:
13219 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13220 }
13221}
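/* Usage note (illustrative sketch, not part of the original source): both
 * plate selections above are read from system properties via property_get(),
 * so they can be overridden from a shell during tuning, e.g.:
 *
 *   adb shell setprop persist.denoise.process.plates 1   // WNR: CbCr only
 *   adb shell setprop persist.tnr.process.plates 2       // TNR: streamlined YCbCr
 *
 * Values follow the switch cases above (0-3); anything else falls back to
 * CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
 */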
13222
13223
13224/*===========================================================================
13225 * FUNCTION : extractSceneMode
13226 *
13227 * DESCRIPTION: Extract scene mode from frameworks set metadata
13228 *
13229 * PARAMETERS :
13230 * @frame_settings: CameraMetadata reference
13231 * @metaMode: ANDROID_CONTROL_MODE value set by the framework
13232 * @hal_metadata: hal metadata structure
13233 *
13234 * RETURN : NO_ERROR on success, error code on failure
13235 *==========================================================================*/
13236int32_t QCamera3HardwareInterface::extractSceneMode(
13237 const CameraMetadata &frame_settings, uint8_t metaMode,
13238 metadata_buffer_t *hal_metadata)
13239{
13240 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013241 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13242
13243 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13244 LOGD("Ignoring control mode OFF_KEEP_STATE");
13245 return NO_ERROR;
13246 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013247
13248 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13249 camera_metadata_ro_entry entry =
13250 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13251 if (0 == entry.count)
13252 return rc;
13253
13254 uint8_t fwk_sceneMode = entry.data.u8[0];
13255
13256 int val = lookupHalName(SCENE_MODES_MAP,
13257 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13258 fwk_sceneMode);
13259 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013260 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013261 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013262 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013263 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013264
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013265 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13266 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13267 }
13268
13269 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13270 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013271 cam_hdr_param_t hdr_params;
13272 hdr_params.hdr_enable = 1;
13273 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13274 hdr_params.hdr_need_1x = false;
13275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13276 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13277 rc = BAD_VALUE;
13278 }
13279 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013280
Thierry Strudel3d639192016-09-09 11:52:26 -070013281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13282 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13283 rc = BAD_VALUE;
13284 }
13285 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013286
13287 if (mForceHdrSnapshot) {
13288 cam_hdr_param_t hdr_params;
13289 hdr_params.hdr_enable = 1;
13290 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13291 hdr_params.hdr_need_1x = false;
13292 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13293 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13294 rc = BAD_VALUE;
13295 }
13296 }
13297
Thierry Strudel3d639192016-09-09 11:52:26 -070013298 return rc;
13299}
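/* Illustrative sketch (assumed framework-side usage, standard Camera3 tags):
 * the settings consumed by extractSceneMode() would typically look like
 *
 *   CameraMetadata settings;
 *   uint8_t mode  = ANDROID_CONTROL_MODE_USE_SCENE_MODE;
 *   uint8_t scene = ANDROID_CONTROL_SCENE_MODE_HDR;
 *   settings.update(ANDROID_CONTROL_MODE, &mode, 1);
 *   settings.update(ANDROID_CONTROL_SCENE_MODE, &scene, 1);
 *
 * extractSceneMode(settings, mode, hal_metadata) then maps the framework
 * scene mode through SCENE_MODES_MAP and, for HDR, programs the bracketing
 * parameters shown above.
 */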
13300
13301/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013302 * FUNCTION : setVideoHdrMode
13303 *
13304 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13305 *
13306 * PARAMETERS :
13307 * @hal_metadata: hal metadata structure
13308 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
13309 *
13310 * RETURN : None
13311 *==========================================================================*/
13312int32_t QCamera3HardwareInterface::setVideoHdrMode(
13313 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13314{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013315 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13316 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13317 }
13318
13319 LOGE("Invalid Video HDR mode %d!", vhdr);
13320 return BAD_VALUE;
13321}
13322
13323/*===========================================================================
13324 * FUNCTION : setSensorHDR
13325 *
13326 * DESCRIPTION: Enable/disable sensor HDR.
13327 *
13328 * PARAMETERS :
13329 * @hal_metadata: hal metadata structure
13330 * @enable: whether to enable or disable sensor HDR
13331 * @isVideoHdrEnable: true when invoked for video HDR (QCAMERA3_VIDEO_HDR_MODE)
13332 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13333 *==========================================================================*/
13334int32_t QCamera3HardwareInterface::setSensorHDR(
13335 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13336{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013337 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013338 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13339
13340 if (enable) {
13341 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13342 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13343 #ifdef _LE_CAMERA_
13344 //Default to staggered HDR for IOT
13345 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13346 #else
13347 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13348 #endif
13349 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13350 }
13351
13352 bool isSupported = false;
13353 switch (sensor_hdr) {
13354 case CAM_SENSOR_HDR_IN_SENSOR:
13355 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13356 CAM_QCOM_FEATURE_SENSOR_HDR) {
13357 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013358 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013359 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013360 break;
13361 case CAM_SENSOR_HDR_ZIGZAG:
13362 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13363 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13364 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013365 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013366 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013367 break;
13368 case CAM_SENSOR_HDR_STAGGERED:
13369 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13370 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13371 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013372 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013373 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013374 break;
13375 case CAM_SENSOR_HDR_OFF:
13376 isSupported = true;
13377 LOGD("Turning off sensor HDR");
13378 break;
13379 default:
13380 LOGE("HDR mode %d not supported", sensor_hdr);
13381 rc = BAD_VALUE;
13382 break;
13383 }
13384
13385 if(isSupported) {
13386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13387 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13388 rc = BAD_VALUE;
13389 } else {
13390 if(!isVideoHdrEnable)
13391 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013392 }
13393 }
13394 return rc;
13395}
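/* Illustrative override (sketch): when HDR is being enabled, the sensor HDR
 * type is taken from persist.camera.sensor.hdr and validated against
 * qcom_supported_feature_mask above. For example, forcing the staggered mode
 * that the IOT build defaults to ("3" per the #ifdef above):
 *
 *   adb shell setprop persist.camera.sensor.hdr 3
 */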
13396
13397/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013398 * FUNCTION : needRotationReprocess
13399 *
13400 * DESCRIPTION: Check whether rotation needs to be done by reprocess in pp
13401 *
13402 * PARAMETERS : none
13403 *
13404 * RETURN : true: needed
13405 * false: no need
13406 *==========================================================================*/
13407bool QCamera3HardwareInterface::needRotationReprocess()
13408{
13409 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13410 // pp has the capability to process rotation, so rotation is handled in reprocess
13411 LOGH("need do reprocess for rotation");
13412 return true;
13413 }
13414
13415 return false;
13416}
13417
13418/*===========================================================================
13419 * FUNCTION : needReprocess
13420 *
13421 * DESCRIPTION: Check whether reprocess is needed
13422 *
13423 * PARAMETERS : none
13424 *
13425 * RETURN : true: needed
13426 * false: no need
13427 *==========================================================================*/
13428bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13429{
13430 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13431 // TODO: add for ZSL HDR later
13432 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13433 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13434 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13435 return true;
13436 } else {
13437 LOGH("already post processed frame");
13438 return false;
13439 }
13440 }
13441 return needRotationReprocess();
13442}
13443
13444/*===========================================================================
13445 * FUNCTION : needJpegExifRotation
13446 *
13447 * DESCRIPTION: Check whether rotation needs to be handled via JPEG EXIF
13448 *
13449 * PARAMETERS : none
13450 *
13451 * RETURN : true: needed
13452 * false: no need
13453 *==========================================================================*/
13454bool QCamera3HardwareInterface::needJpegExifRotation()
13455{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013456 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013457 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13458 LOGD("Need use Jpeg EXIF Rotation");
13459 return true;
13460 }
13461 return false;
13462}
13463
13464/*===========================================================================
13465 * FUNCTION : addOfflineReprocChannel
13466 *
13467 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13468 * coming from input channel
13469 *
13470 * PARAMETERS :
13471 * @config : reprocess configuration
13472 * @inputChHandle : pointer to the input (source) channel
13473 *
13474 *
13475 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13476 *==========================================================================*/
13477QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13478 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13479{
13480 int32_t rc = NO_ERROR;
13481 QCamera3ReprocessChannel *pChannel = NULL;
13482
13483 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013484 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13485 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013486 if (NULL == pChannel) {
13487 LOGE("no mem for reprocess channel");
13488 return NULL;
13489 }
13490
13491 rc = pChannel->initialize(IS_TYPE_NONE);
13492 if (rc != NO_ERROR) {
13493 LOGE("init reprocess channel failed, ret = %d", rc);
13494 delete pChannel;
13495 return NULL;
13496 }
13497
13498 // pp feature config
13499 cam_pp_feature_config_t pp_config;
13500 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13501
13502 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13503 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13504 & CAM_QCOM_FEATURE_DSDN) {
13505 //Use CPP CDS in case h/w supports it.
13506 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13507 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13508 }
13509 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13510 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13511 }
13512
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013513 if (config.hdr_param.hdr_enable) {
13514 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13515 pp_config.hdr_param = config.hdr_param;
13516 }
13517
13518 if (mForceHdrSnapshot) {
13519 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13520 pp_config.hdr_param.hdr_enable = 1;
13521 pp_config.hdr_param.hdr_need_1x = 0;
13522 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13523 }
13524
Thierry Strudel3d639192016-09-09 11:52:26 -070013525 rc = pChannel->addReprocStreamsFromSource(pp_config,
13526 config,
13527 IS_TYPE_NONE,
13528 mMetadataChannel);
13529
13530 if (rc != NO_ERROR) {
13531 delete pChannel;
13532 return NULL;
13533 }
13534 return pChannel;
13535}
13536
13537/*===========================================================================
13538 * FUNCTION : getMobicatMask
13539 *
13540 * DESCRIPTION: returns mobicat mask
13541 *
13542 * PARAMETERS : none
13543 *
13544 * RETURN : mobicat mask
13545 *
13546 *==========================================================================*/
13547uint8_t QCamera3HardwareInterface::getMobicatMask()
13548{
13549 return m_MobicatMask;
13550}
13551
13552/*===========================================================================
13553 * FUNCTION : setMobicat
13554 *
13555 * DESCRIPTION: set Mobicat on/off.
13556 *
13557 * PARAMETERS :
13558 * None
13559 *
13560 * RETURN : int32_t type of status
13561 * NO_ERROR -- success
13562 * non-zero failure code
13563 *==========================================================================*/
13564int32_t QCamera3HardwareInterface::setMobicat()
13565{
13566 char value [PROPERTY_VALUE_MAX];
13567 property_get("persist.camera.mobicat", value, "0");
13568 int32_t ret = NO_ERROR;
13569 uint8_t enableMobi = (uint8_t)atoi(value);
13570
13571 if (enableMobi) {
13572 tune_cmd_t tune_cmd;
13573 tune_cmd.type = SET_RELOAD_CHROMATIX;
13574 tune_cmd.module = MODULE_ALL;
13575 tune_cmd.value = TRUE;
13576 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13577 CAM_INTF_PARM_SET_VFE_COMMAND,
13578 tune_cmd);
13579
13580 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13581 CAM_INTF_PARM_SET_PP_COMMAND,
13582 tune_cmd);
13583 }
13584 m_MobicatMask = enableMobi;
13585
13586 return ret;
13587}
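/* Illustrative: Mobicat (tuning metadata) is opt-in through a property read
 * in setMobicat(), e.g.
 *
 *   adb shell setprop persist.camera.mobicat 1
 *
 * A non-zero value triggers the chromatix reload batch entries above and is
 * reported back to callers through getMobicatMask().
 */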
13588
13589/*===========================================================================
13590* FUNCTION : getLogLevel
13591*
13592* DESCRIPTION: Reads the log level property into a variable
13593*
13594* PARAMETERS :
13595* None
13596*
13597* RETURN :
13598* None
13599*==========================================================================*/
13600void QCamera3HardwareInterface::getLogLevel()
13601{
13602 char prop[PROPERTY_VALUE_MAX];
13603 uint32_t globalLogLevel = 0;
13604
13605 property_get("persist.camera.hal.debug", prop, "0");
13606 int val = atoi(prop);
13607 if (0 <= val) {
13608 gCamHal3LogLevel = (uint32_t)val;
13609 }
13610
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013611 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013612 gKpiDebugLevel = atoi(prop);
13613
13614 property_get("persist.camera.global.debug", prop, "0");
13615 val = atoi(prop);
13616 if (0 <= val) {
13617 globalLogLevel = (uint32_t)val;
13618 }
13619
13620 /* Highest log level among hal.logs and global.logs is selected */
13621 if (gCamHal3LogLevel < globalLogLevel)
13622 gCamHal3LogLevel = globalLogLevel;
13623
13624 return;
13625}
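/* Illustrative: because dump() re-reads these properties, log verbosity can
 * be raised at runtime without restarting the media server:
 *
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell setprop persist.camera.global.debug 2
 *   adb shell dumpsys media.camera
 *
 * The effective gCamHal3LogLevel is the higher of the two properties.
 */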
13626
13627/*===========================================================================
13628 * FUNCTION : validateStreamRotations
13629 *
13630 * DESCRIPTION: Check if the rotations requested are supported
13631 *
13632 * PARAMETERS :
13633 * @stream_list : streams to be configured
13634 *
13635 * RETURN : NO_ERROR on success
13636 * -EINVAL on failure
13637 *
13638 *==========================================================================*/
13639int QCamera3HardwareInterface::validateStreamRotations(
13640 camera3_stream_configuration_t *streamList)
13641{
13642 int rc = NO_ERROR;
13643
13644 /*
13645 * Loop through all streams requested in configuration
13646 * Check if unsupported rotations have been requested on any of them
13647 */
13648 for (size_t j = 0; j < streamList->num_streams; j++){
13649 camera3_stream_t *newStream = streamList->streams[j];
13650
13651 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13652 bool isImplDef = (newStream->format ==
13653 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13654 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13655 isImplDef);
13656
13657 if (isRotated && (!isImplDef || isZsl)) {
13658 LOGE("Error: Unsupported rotation of %d requested for stream"
13659 "type:%d and stream format:%d",
13660 newStream->rotation, newStream->stream_type,
13661 newStream->format);
13662 rc = -EINVAL;
13663 break;
13664 }
13665 }
13666
13667 return rc;
13668}
13669
13670/*===========================================================================
13671* FUNCTION : getFlashInfo
13672*
13673* DESCRIPTION: Retrieve information about whether the device has a flash.
13674*
13675* PARAMETERS :
13676* @cameraId : Camera id to query
13677* @hasFlash : Boolean indicating whether there is a flash device
13678* associated with given camera
13679* @flashNode : If a flash device exists, this will be its device node.
13680*
13681* RETURN :
13682* None
13683*==========================================================================*/
13684void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13685 bool& hasFlash,
13686 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13687{
13688 cam_capability_t* camCapability = gCamCapability[cameraId];
13689 if (NULL == camCapability) {
13690 hasFlash = false;
13691 flashNode[0] = '\0';
13692 } else {
13693 hasFlash = camCapability->flash_available;
13694 strlcpy(flashNode,
13695 (char*)camCapability->flash_dev_name,
13696 QCAMERA_MAX_FILEPATH_LENGTH);
13697 }
13698}
13699
13700/*===========================================================================
13701* FUNCTION : getEepromVersionInfo
13702*
13703* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13704*
13705* PARAMETERS : None
13706*
13707* RETURN : string describing EEPROM version
13708* "\0" if no such info available
13709*==========================================================================*/
13710const char *QCamera3HardwareInterface::getEepromVersionInfo()
13711{
13712 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13713}
13714
13715/*===========================================================================
13716* FUNCTION : getLdafCalib
13717*
13718* DESCRIPTION: Retrieve Laser AF calibration data
13719*
13720* PARAMETERS : None
13721*
13722* RETURN : Two uint32_t describing laser AF calibration data
13723* NULL if none is available.
13724*==========================================================================*/
13725const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13726{
13727 if (mLdafCalibExist) {
13728 return &mLdafCalib[0];
13729 } else {
13730 return NULL;
13731 }
13732}
13733
13734/*===========================================================================
13735 * FUNCTION : dynamicUpdateMetaStreamInfo
13736 *
13737 * DESCRIPTION: This function:
13738 * (1) stops all the channels
13739 * (2) returns error on pending requests and buffers
13740 * (3) sends metastream_info in setparams
13741 * (4) starts all channels
13742 * This is useful when the sensor has to be restarted to apply
13743 * settings such as the frame rate of a different sensor mode
13744 *
13745 * PARAMETERS : None
13746 *
13747 * RETURN : NO_ERROR on success
13748 * Error codes on failure
13749 *
13750 *==========================================================================*/
13751int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013754 int rc = NO_ERROR;
13755
13756 LOGD("E");
13757
13758 rc = stopAllChannels();
13759 if (rc < 0) {
13760 LOGE("stopAllChannels failed");
13761 return rc;
13762 }
13763
13764 rc = notifyErrorForPendingRequests();
13765 if (rc < 0) {
13766 LOGE("notifyErrorForPendingRequests failed");
13767 return rc;
13768 }
13769
13770 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13771 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13772 "Format:%d",
13773 mStreamConfigInfo.type[i],
13774 mStreamConfigInfo.stream_sizes[i].width,
13775 mStreamConfigInfo.stream_sizes[i].height,
13776 mStreamConfigInfo.postprocess_mask[i],
13777 mStreamConfigInfo.format[i]);
13778 }
13779
13780 /* Send meta stream info once again so that ISP can start */
13781 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13782 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13783 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13784 mParameters);
13785 if (rc < 0) {
13786 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13787 }
13788
13789 rc = startAllChannels();
13790 if (rc < 0) {
13791 LOGE("startAllChannels failed");
13792 return rc;
13793 }
13794
13795 LOGD("X");
13796 return rc;
13797}
13798
13799/*===========================================================================
13800 * FUNCTION : stopAllChannels
13801 *
13802 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13803 *
13804 * PARAMETERS : None
13805 *
13806 * RETURN : NO_ERROR on success
13807 * Error codes on failure
13808 *
13809 *==========================================================================*/
13810int32_t QCamera3HardwareInterface::stopAllChannels()
13811{
13812 int32_t rc = NO_ERROR;
13813
13814 LOGD("Stopping all channels");
13815 // Stop the Streams/Channels
13816 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13817 it != mStreamInfo.end(); it++) {
13818 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13819 if (channel) {
13820 channel->stop();
13821 }
13822 (*it)->status = INVALID;
13823 }
13824
13825 if (mSupportChannel) {
13826 mSupportChannel->stop();
13827 }
13828 if (mAnalysisChannel) {
13829 mAnalysisChannel->stop();
13830 }
13831 if (mRawDumpChannel) {
13832 mRawDumpChannel->stop();
13833 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013834 if (mHdrPlusRawSrcChannel) {
13835 mHdrPlusRawSrcChannel->stop();
13836 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013837 if (mMetadataChannel) {
13838 /* If content of mStreamInfo is not 0, there is metadata stream */
13839 mMetadataChannel->stop();
13840 }
13841
13842 LOGD("All channels stopped");
13843 return rc;
13844}
13845
13846/*===========================================================================
13847 * FUNCTION : startAllChannels
13848 *
13849 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13850 *
13851 * PARAMETERS : None
13852 *
13853 * RETURN : NO_ERROR on success
13854 * Error codes on failure
13855 *
13856 *==========================================================================*/
13857int32_t QCamera3HardwareInterface::startAllChannels()
13858{
13859 int32_t rc = NO_ERROR;
13860
13861 LOGD("Start all channels ");
13862 // Start the Streams/Channels
13863 if (mMetadataChannel) {
13864 /* If content of mStreamInfo is not 0, there is metadata stream */
13865 rc = mMetadataChannel->start();
13866 if (rc < 0) {
13867 LOGE("META channel start failed");
13868 return rc;
13869 }
13870 }
13871 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13872 it != mStreamInfo.end(); it++) {
13873 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13874 if (channel) {
13875 rc = channel->start();
13876 if (rc < 0) {
13877 LOGE("channel start failed");
13878 return rc;
13879 }
13880 }
13881 }
13882 if (mAnalysisChannel) {
13883 mAnalysisChannel->start();
13884 }
13885 if (mSupportChannel) {
13886 rc = mSupportChannel->start();
13887 if (rc < 0) {
13888 LOGE("Support channel start failed");
13889 return rc;
13890 }
13891 }
13892 if (mRawDumpChannel) {
13893 rc = mRawDumpChannel->start();
13894 if (rc < 0) {
13895 LOGE("RAW dump channel start failed");
13896 return rc;
13897 }
13898 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013899 if (mHdrPlusRawSrcChannel) {
13900 rc = mHdrPlusRawSrcChannel->start();
13901 if (rc < 0) {
13902 LOGE("HDR+ RAW channel start failed");
13903 return rc;
13904 }
13905 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013906
13907 LOGD("All channels started");
13908 return rc;
13909}
13910
13911/*===========================================================================
13912 * FUNCTION : notifyErrorForPendingRequests
13913 *
13914 * DESCRIPTION: This function sends error for all the pending requests/buffers
13915 *
13916 * PARAMETERS : None
13917 *
13918 * RETURN : Error codes
13919 * NO_ERROR on success
13920 *
13921 *==========================================================================*/
13922int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13923{
13924 int32_t rc = NO_ERROR;
13925 unsigned int frameNum = 0;
13926 camera3_capture_result_t result;
13927 camera3_stream_buffer_t *pStream_Buf = NULL;
13928
13929 memset(&result, 0, sizeof(camera3_capture_result_t));
13930
13931 if (mPendingRequestsList.size() > 0) {
13932 pendingRequestIterator i = mPendingRequestsList.begin();
13933 frameNum = i->frame_number;
13934 } else {
13935 /* There might still be pending buffers even though there are
13936 no pending requests. Setting the frameNum to MAX so that
13937 all the buffers with smaller frame numbers are returned */
13938 frameNum = UINT_MAX;
13939 }
13940
13941 LOGH("Oldest frame num on mPendingRequestsList = %u",
13942 frameNum);
13943
Emilian Peev7650c122017-01-19 08:24:33 -080013944 notifyErrorFoPendingDepthData(mDepthChannel);
13945
Thierry Strudel3d639192016-09-09 11:52:26 -070013946 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13947 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13948
13949 if (req->frame_number < frameNum) {
13950 // Send Error notify to frameworks for each buffer for which
13951 // metadata buffer is already sent
13952 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13953 req->frame_number, req->mPendingBufferList.size());
13954
13955 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13956 if (NULL == pStream_Buf) {
13957 LOGE("No memory for pending buffers array");
13958 return NO_MEMORY;
13959 }
13960 memset(pStream_Buf, 0,
13961 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13962 result.result = NULL;
13963 result.frame_number = req->frame_number;
13964 result.num_output_buffers = req->mPendingBufferList.size();
13965 result.output_buffers = pStream_Buf;
13966
13967 size_t index = 0;
13968 for (auto info = req->mPendingBufferList.begin();
13969 info != req->mPendingBufferList.end(); ) {
13970
13971 camera3_notify_msg_t notify_msg;
13972 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13973 notify_msg.type = CAMERA3_MSG_ERROR;
13974 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13975 notify_msg.message.error.error_stream = info->stream;
13976 notify_msg.message.error.frame_number = req->frame_number;
13977 pStream_Buf[index].acquire_fence = -1;
13978 pStream_Buf[index].release_fence = -1;
13979 pStream_Buf[index].buffer = info->buffer;
13980 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13981 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013982 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013983 index++;
13984 // Remove buffer from list
13985 info = req->mPendingBufferList.erase(info);
13986 }
13987
13988 // Remove this request from Map
13989 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13990 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13991 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13992
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013993 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013994
13995 delete [] pStream_Buf;
13996 } else {
13997
13998 // Go through the pending requests info and send error request to framework
13999 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
14000
14001 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
14002
14003 // Send error notify to frameworks
14004 camera3_notify_msg_t notify_msg;
14005 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14006 notify_msg.type = CAMERA3_MSG_ERROR;
14007 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14008 notify_msg.message.error.error_stream = NULL;
14009 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014010 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014011
14012 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
14013 if (NULL == pStream_Buf) {
14014 LOGE("No memory for pending buffers array");
14015 return NO_MEMORY;
14016 }
14017 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14018
14019 result.result = NULL;
14020 result.frame_number = req->frame_number;
14021 result.input_buffer = i->input_buffer;
14022 result.num_output_buffers = req->mPendingBufferList.size();
14023 result.output_buffers = pStream_Buf;
14024
14025 size_t index = 0;
14026 for (auto info = req->mPendingBufferList.begin();
14027 info != req->mPendingBufferList.end(); ) {
14028 pStream_Buf[index].acquire_fence = -1;
14029 pStream_Buf[index].release_fence = -1;
14030 pStream_Buf[index].buffer = info->buffer;
14031 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14032 pStream_Buf[index].stream = info->stream;
14033 index++;
14034 // Remove buffer from list
14035 info = req->mPendingBufferList.erase(info);
14036 }
14037
14038 // Remove this request from Map
14039 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14040 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14041 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14042
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014043 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014044 delete [] pStream_Buf;
14045 i = erasePendingRequest(i);
14046 }
14047 }
14048
14049 /* Reset pending frame Drop list and requests list */
14050 mPendingFrameDropList.clear();
14051
14052 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14053 req.mPendingBufferList.clear();
14054 }
14055 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014056 LOGH("Cleared all the pending buffers ");
14057
14058 return rc;
14059}
14060
14061bool QCamera3HardwareInterface::isOnEncoder(
14062 const cam_dimension_t max_viewfinder_size,
14063 uint32_t width, uint32_t height)
14064{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014065 return ((width > (uint32_t)max_viewfinder_size.width) ||
14066 (height > (uint32_t)max_viewfinder_size.height) ||
14067 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14068 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014069}
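/* Worked example (illustrative numbers): with a max_viewfinder_size of
 * 1920x1080, a 3840x2160 stream exceeds the viewfinder bounds and is treated
 * as an encoder stream (returns true), while a 1280x720 stream is not.
 */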
14070
14071/*===========================================================================
14072 * FUNCTION : setBundleInfo
14073 *
14074 * DESCRIPTION: Set bundle info for all streams that are bundle.
14075 *
14076 * PARAMETERS : None
14077 *
14078 * RETURN : NO_ERROR on success
14079 * Error codes on failure
14080 *==========================================================================*/
14081int32_t QCamera3HardwareInterface::setBundleInfo()
14082{
14083 int32_t rc = NO_ERROR;
14084
14085 if (mChannelHandle) {
14086 cam_bundle_config_t bundleInfo;
14087 memset(&bundleInfo, 0, sizeof(bundleInfo));
14088 rc = mCameraHandle->ops->get_bundle_info(
14089 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14090 if (rc != NO_ERROR) {
14091 LOGE("get_bundle_info failed");
14092 return rc;
14093 }
14094 if (mAnalysisChannel) {
14095 mAnalysisChannel->setBundleInfo(bundleInfo);
14096 }
14097 if (mSupportChannel) {
14098 mSupportChannel->setBundleInfo(bundleInfo);
14099 }
14100 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14101 it != mStreamInfo.end(); it++) {
14102 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14103 channel->setBundleInfo(bundleInfo);
14104 }
14105 if (mRawDumpChannel) {
14106 mRawDumpChannel->setBundleInfo(bundleInfo);
14107 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014108 if (mHdrPlusRawSrcChannel) {
14109 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14110 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014111 }
14112
14113 return rc;
14114}
14115
14116/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014117 * FUNCTION : setInstantAEC
14118 *
14119 * DESCRIPTION: Set Instant AEC related params.
14120 *
14121 * PARAMETERS :
14122 * @meta: CameraMetadata reference
14123 *
14124 * RETURN : NO_ERROR on success
14125 * Error codes on failure
14126 *==========================================================================*/
14127int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14128{
14129 int32_t rc = NO_ERROR;
14130 uint8_t val = 0;
14131 char prop[PROPERTY_VALUE_MAX];
14132
14133 // First try to configure instant AEC from framework metadata
14134 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14135 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14136 }
14137
14138 // If framework did not set this value, try to read from set prop.
14139 if (val == 0) {
14140 memset(prop, 0, sizeof(prop));
14141 property_get("persist.camera.instant.aec", prop, "0");
14142 val = (uint8_t)atoi(prop);
14143 }
14144
14145 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14146 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14147 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14148 mInstantAEC = val;
14149 mInstantAECSettledFrameNumber = 0;
14150 mInstantAecFrameIdxCount = 0;
14151 LOGH("instantAEC value set %d",val);
14152 if (mInstantAEC) {
14153 memset(prop, 0, sizeof(prop));
14154 property_get("persist.camera.ae.instant.bound", prop, "10");
14155 int32_t aec_frame_skip_cnt = atoi(prop);
14156 if (aec_frame_skip_cnt >= 0) {
14157 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14158 } else {
14159 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14160 rc = BAD_VALUE;
14161 }
14162 }
14163 } else {
14164 LOGE("Bad instant aec value set %d", val);
14165 rc = BAD_VALUE;
14166 }
14167 return rc;
14168}
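/* Illustrative configuration (sketch): instant AEC is driven either by the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag in the request metadata or, when the
 * framework leaves it unset, by properties:
 *
 *   adb shell setprop persist.camera.instant.aec 1        // convergence mode
 *   adb shell setprop persist.camera.ae.instant.bound 10  // frames to skip
 *
 * Accepted values are bounded by CAM_AEC_NORMAL_CONVERGENCE and
 * CAM_AEC_CONVERGENCE_MAX as checked above.
 */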
14169
14170/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014171 * FUNCTION : get_num_overall_buffers
14172 *
14173 * DESCRIPTION: Return the total number of pending buffers across all requests.
14174 *
14175 * PARAMETERS : None
14176 *
14177 * RETURN : Number of overall pending buffers
14178 *
14179 *==========================================================================*/
14180uint32_t PendingBuffersMap::get_num_overall_buffers()
14181{
14182 uint32_t sum_buffers = 0;
14183 for (auto &req : mPendingBuffersInRequest) {
14184 sum_buffers += req.mPendingBufferList.size();
14185 }
14186 return sum_buffers;
14187}
14188
14189/*===========================================================================
14190 * FUNCTION : removeBuf
14191 *
14192 * DESCRIPTION: Remove a matching buffer from tracker.
14193 *
14194 * PARAMETERS : @buffer: image buffer for the callback
14195 *
14196 * RETURN : None
14197 *
14198 *==========================================================================*/
14199void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14200{
14201 bool buffer_found = false;
14202 for (auto req = mPendingBuffersInRequest.begin();
14203 req != mPendingBuffersInRequest.end(); req++) {
14204 for (auto k = req->mPendingBufferList.begin();
14205 k != req->mPendingBufferList.end(); k++ ) {
14206 if (k->buffer == buffer) {
14207 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14208 req->frame_number, buffer);
14209 k = req->mPendingBufferList.erase(k);
14210 if (req->mPendingBufferList.empty()) {
14211 // Remove this request from Map
14212 req = mPendingBuffersInRequest.erase(req);
14213 }
14214 buffer_found = true;
14215 break;
14216 }
14217 }
14218 if (buffer_found) {
14219 break;
14220 }
14221 }
14222 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14223 get_num_overall_buffers());
14224}
14225
14226/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014227 * FUNCTION : getBufErrStatus
14228 *
14229 * DESCRIPTION: get buffer error status
14230 *
14231 * PARAMETERS : @buffer: buffer handle
14232 *
14233 * RETURN : Error status
14234 *
14235 *==========================================================================*/
14236int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14237{
14238 for (auto& req : mPendingBuffersInRequest) {
14239 for (auto& k : req.mPendingBufferList) {
14240 if (k.buffer == buffer)
14241 return k.bufStatus;
14242 }
14243 }
14244 return CAMERA3_BUFFER_STATUS_OK;
14245}
14246
14247/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014248 * FUNCTION : setPAAFSupport
14249 *
14250 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14251 * feature mask according to stream type and filter
14252 * arrangement
14253 *
14254 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14255 * @stream_type: stream type
14256 * @filter_arrangement: filter arrangement
14257 *
14258 * RETURN : None
14259 *==========================================================================*/
14260void QCamera3HardwareInterface::setPAAFSupport(
14261 cam_feature_mask_t& feature_mask,
14262 cam_stream_type_t stream_type,
14263 cam_color_filter_arrangement_t filter_arrangement)
14264{
Thierry Strudel3d639192016-09-09 11:52:26 -070014265 switch (filter_arrangement) {
14266 case CAM_FILTER_ARRANGEMENT_RGGB:
14267 case CAM_FILTER_ARRANGEMENT_GRBG:
14268 case CAM_FILTER_ARRANGEMENT_GBRG:
14269 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014270 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14271 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014272 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014273 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14274 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014275 }
14276 break;
14277 case CAM_FILTER_ARRANGEMENT_Y:
14278 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14279 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14280 }
14281 break;
14282 default:
14283 break;
14284 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014285 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14286 feature_mask, stream_type, filter_arrangement);
14287
14288
Thierry Strudel3d639192016-09-09 11:52:26 -070014289}
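/* Minimal sketch of the intended effect (assumed values): a Bayer preview
 * stream whose mask does not carry the PPEISCORE bit picks up PAAF.
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW, CAM_FILTER_ARRANGEMENT_RGGB);
 *   // mask now contains CAM_QCOM_FEATURE_PAAF
 */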
14290
14291/*===========================================================================
14292* FUNCTION : getSensorMountAngle
14293*
14294* DESCRIPTION: Retrieve sensor mount angle
14295*
14296* PARAMETERS : None
14297*
14298* RETURN : sensor mount angle in uint32_t
14299*==========================================================================*/
14300uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14301{
14302 return gCamCapability[mCameraId]->sensor_mount_angle;
14303}
14304
14305/*===========================================================================
14306* FUNCTION : getRelatedCalibrationData
14307*
14308* DESCRIPTION: Retrieve related system calibration data
14309*
14310* PARAMETERS : None
14311*
14312* RETURN : Pointer of related system calibration data
14313*==========================================================================*/
14314const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14315{
14316 return (const cam_related_system_calibration_data_t *)
14317 &(gCamCapability[mCameraId]->related_cam_calibration);
14318}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014319
14320/*===========================================================================
14321 * FUNCTION : is60HzZone
14322 *
14323 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14324 *
14325 * PARAMETERS : None
14326 *
14327 * RETURN : True if in 60Hz zone, False otherwise
14328 *==========================================================================*/
14329bool QCamera3HardwareInterface::is60HzZone()
14330{
14331 time_t t = time(NULL);
14332 struct tm lt;
14333
14334 struct tm* r = localtime_r(&t, &lt);
14335
14336 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14337 return true;
14338 else
14339 return false;
14340}
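/* Worked examples for the UTC-offset heuristic above (-2h = -7200 s,
 * +8h = 28800 s); this is only a coarse guess at the mains frequency:
 *
 *   UTC-05:00 (tm_gmtoff = -18000)  ->  <= -7200              -> 60Hz (true)
 *   UTC+01:00 (tm_gmtoff =   3600)  ->  inside (-7200, 28800) -> 50Hz (false)
 *   UTC+09:00 (tm_gmtoff =  32400)  ->  >= 28800              -> 60Hz (true)
 */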
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014341
14342/*===========================================================================
14343 * FUNCTION : adjustBlackLevelForCFA
14344 *
14345 * DESCRIPTION: Remap the black level pattern from RGGB order to the
14346 * order of the sensor's Bayer CFA (Color Filter Array).
14347 *
14348 * PARAMETERS : @input: black level pattern in the order of RGGB
14349 * @output: black level pattern in the order of CFA
14350 * @color_arrangement: CFA color arrangement
14351 *
14352 * RETURN : None
14353 *==========================================================================*/
14354template<typename T>
14355void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14356 T input[BLACK_LEVEL_PATTERN_CNT],
14357 T output[BLACK_LEVEL_PATTERN_CNT],
14358 cam_color_filter_arrangement_t color_arrangement)
14359{
14360 switch (color_arrangement) {
14361 case CAM_FILTER_ARRANGEMENT_GRBG:
14362 output[0] = input[1];
14363 output[1] = input[0];
14364 output[2] = input[3];
14365 output[3] = input[2];
14366 break;
14367 case CAM_FILTER_ARRANGEMENT_GBRG:
14368 output[0] = input[2];
14369 output[1] = input[3];
14370 output[2] = input[0];
14371 output[3] = input[1];
14372 break;
14373 case CAM_FILTER_ARRANGEMENT_BGGR:
14374 output[0] = input[3];
14375 output[1] = input[2];
14376 output[2] = input[1];
14377 output[3] = input[0];
14378 break;
14379 case CAM_FILTER_ARRANGEMENT_RGGB:
14380 output[0] = input[0];
14381 output[1] = input[1];
14382 output[2] = input[2];
14383 output[3] = input[3];
14384 break;
14385 default:
14386 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14387 break;
14388 }
14389}
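/* Minimal usage sketch (values are illustrative only):
 *
 *   float rggb[BLACK_LEVEL_PATTERN_CNT] = {64.f, 65.f, 66.f, 67.f};
 *   float cfa[BLACK_LEVEL_PATTERN_CNT];
 *   adjustBlackLevelForCFA(rggb, cfa, CAM_FILTER_ARRANGEMENT_BGGR);
 *   // cfa == {67, 66, 65, 64}: the R, Gr, Gb, B values re-ordered to
 *   // the B, Gb, Gr, R layout of a BGGR sensor
 */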
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014390
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014391void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14392 CameraMetadata &resultMetadata,
14393 std::shared_ptr<metadata_buffer_t> settings)
14394{
14395 if (settings == nullptr) {
14396 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14397 return;
14398 }
14399
14400 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14401 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14402 }
14403
14404 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14405 String8 str((const char *)gps_methods);
14406 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14407 }
14408
14409 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14410 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14411 }
14412
14413 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14414 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14415 }
14416
14417 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14418 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14419 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14420 }
14421
14422 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14423 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14424 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14425 }
14426
14427 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14428 int32_t fwk_thumb_size[2];
14429 fwk_thumb_size[0] = thumb_size->width;
14430 fwk_thumb_size[1] = thumb_size->height;
14431 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14432 }
14433
14434 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14435 uint8_t fwk_intent = intent[0];
14436 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14437 }
14438}
14439
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014440bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14441 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14442 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014443{
14444 if (hdrPlusRequest == nullptr) return false;
14445
14446 // Check noise reduction mode is high quality.
14447 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14448 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14449 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014450 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14451 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014452 return false;
14453 }
14454
14455 // Check edge mode is high quality.
14456 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14457 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14458 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14459 return false;
14460 }
14461
14462 if (request.num_output_buffers != 1 ||
14463 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14464 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014465 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14466 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14467 request.output_buffers[i].stream->width,
14468 request.output_buffers[i].stream->height,
14469 request.output_buffers[i].stream->format);
14470 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014471 return false;
14472 }
14473
14474 // Get a YUV buffer from pic channel.
14475 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14476 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14477 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14478 if (res != OK) {
14479 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14480 __FUNCTION__, strerror(-res), res);
14481 return false;
14482 }
14483
14484 pbcamera::StreamBuffer buffer;
14485 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014486 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014487 buffer.data = yuvBuffer->buffer;
14488 buffer.dataSize = yuvBuffer->frame_len;
14489
14490 pbcamera::CaptureRequest pbRequest;
14491 pbRequest.id = request.frame_number;
14492 pbRequest.outputBuffers.push_back(buffer);
14493
14494 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014495 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014496 if (res != OK) {
14497 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14498 strerror(-res), res);
14499 return false;
14500 }
14501
14502 hdrPlusRequest->yuvBuffer = yuvBuffer;
14503 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14504
14505 return true;
14506}
14507
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014508status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14509{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014510 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14511 return OK;
14512 }
14513
14514 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14515 if (res != OK) {
14516 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14517 strerror(-res), res);
14518 return res;
14519 }
14520 gHdrPlusClientOpening = true;
14521
14522 return OK;
14523}
14524
Chien-Yu Chenee335912017-02-09 17:53:20 -080014525status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14526{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014527 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014528
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014529 // Check if gHdrPlusClient is opened or being opened.
14530 if (gHdrPlusClient == nullptr) {
14531 if (gHdrPlusClientOpening) {
14532 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14533 return OK;
14534 }
14535
14536 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014537 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014538 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14539 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014540 return res;
14541 }
14542
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014543 // When opening HDR+ client completes, HDR+ mode will be enabled.
14544 return OK;
14545
Chien-Yu Chenee335912017-02-09 17:53:20 -080014546 }
14547
14548 // Configure stream for HDR+.
14549 res = configureHdrPlusStreamsLocked();
14550 if (res != OK) {
14551 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014552 return res;
14553 }
14554
14555 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14556 res = gHdrPlusClient->setZslHdrPlusMode(true);
14557 if (res != OK) {
14558 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014559 return res;
14560 }
14561
14562 mHdrPlusModeEnabled = true;
14563 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14564
14565 return OK;
14566}
14567
14568void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14569{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014570 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014571 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014572 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14573 if (res != OK) {
14574 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14575 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014576
14577 // Close HDR+ client so Easel can enter low power mode.
14578 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14579 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014580 }
14581
14582 mHdrPlusModeEnabled = false;
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014583 gHdrPlusClientOpening = false;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014584 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14585}
14586
14587status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014588{
14589 pbcamera::InputConfiguration inputConfig;
14590 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14591 status_t res = OK;
14592
14593 // Configure HDR+ client streams.
14594 // Get input config.
14595 if (mHdrPlusRawSrcChannel) {
14596 // HDR+ input buffers will be provided by HAL.
14597 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14598 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14599 if (res != OK) {
14600 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
14601 __FUNCTION__, strerror(-res), res);
14602 return res;
14603 }
14604
14605 inputConfig.isSensorInput = false;
14606 } else {
14607 // Sensor MIPI will send data to Easel.
14608 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014609 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014610 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14611 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14612 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14613 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14614 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14615 if (mSensorModeInfo.num_raw_bits != 10) {
14616 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14617 mSensorModeInfo.num_raw_bits);
14618 return BAD_VALUE;
14619 }
14620
14621 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014622 }
14623
14624 // Get output configurations.
14625 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014626 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014627
14628 // Easel may need to output YUV output buffers if mPictureChannel was created.
14629 pbcamera::StreamConfiguration yuvOutputConfig;
14630 if (mPictureChannel != nullptr) {
14631 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14632 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14633 if (res != OK) {
14634 LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14635 __FUNCTION__, strerror(-res), res);
14636
14637 return res;
14638 }
14639
14640 outputStreamConfigs.push_back(yuvOutputConfig);
14641 }
14642
14643 // TODO: consider other channels for YUV output buffers.
14644
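// Hand the assembled input and output configurations to the HDR+ client.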
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014645 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014646 if (res != OK) {
14647 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14648 strerror(-res), res);
14649 return res;
14650 }
14651
14652 return OK;
14653}
14654
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014655void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
14656{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014657 if (client == nullptr) {
14658 ALOGE("%s: Opened client is null.", __FUNCTION__);
14659 return;
14660 }
14661
Chien-Yu Chene96475e2017-04-11 11:53:26 -070014662 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014663 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14664
14665 Mutex::Autolock l(gHdrPlusClientLock);
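// If HDR+ was disabled while this client was still opening, disableHdrPlusModeLocked()
// has already cleared gHdrPlusClientOpening; drop the newly opened client in that case.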
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014666 if (!gHdrPlusClientOpening) {
14667 ALOGW("%s: HDR+ was disabled while the HDR+ client was being opened.", __FUNCTION__);
14668 return;
14669 }
14670
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014671 gHdrPlusClient = std::move(client);
14672 gHdrPlusClientOpening = false;
14673
14674 // Set static metadata.
14675 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14676 if (res != OK) {
14677 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14678 __FUNCTION__, strerror(-res), res);
14679 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14680 gHdrPlusClient = nullptr;
14681 return;
14682 }
14683
14684 // Enable HDR+ mode.
14685 res = enableHdrPlusModeLocked();
14686 if (res != OK) {
14687 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
14688 }
14689}
14690
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014691void QCamera3HardwareInterface::onOpenFailed(status_t err)
14692{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014693 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14694 Mutex::Autolock l(gHdrPlusClientLock);
14695 gHdrPlusClientOpening = false;
14696}
14697
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014698void QCamera3HardwareInterface::onFatalError()
14699{
14700 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
14701
14702 // Set HAL state to error.
14703 pthread_mutex_lock(&mMutex);
14704 mState = ERROR;
14705 pthread_mutex_unlock(&mMutex);
14706
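// Propagate the fatal error through the HAL's common device-error handling.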
14707 handleCameraDeviceError();
14708}
14709
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014710void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014711 const camera_metadata_t &resultMetadata)
14712{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014713 if (result != nullptr) {
14714 if (result->outputBuffers.size() != 1) {
14715 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14716 result->outputBuffers.size());
14717 return;
14718 }
14719
14720 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14721 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14722 result->outputBuffers[0].streamId);
14723 return;
14724 }
14725
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014726 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014727 HdrPlusPendingRequest pendingRequest;
14728 {
14729 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14730 auto req = mHdrPlusPendingRequests.find(result->requestId);
14731 pendingRequest = req->second;
14732 }
14733
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014734 // Update the result metadata with the settings of the HDR+ still capture request because
14735 // the result metadata belongs to a ZSL buffer.
14736 CameraMetadata metadata;
14737 metadata = &resultMetadata;
14738 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14739 camera_metadata_t* updatedResultMetadata = metadata.release();
14740
14741 QCamera3PicChannel *picChannel =
14742 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14743
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014744 // Check if dumping HDR+ YUV output is enabled.
14745 char prop[PROPERTY_VALUE_MAX];
14746 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14747 bool dumpYuvOutput = atoi(prop);
14748
14749 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014750 // Dump yuv buffer to a ppm file.
14751 pbcamera::StreamConfiguration outputConfig;
14752 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14753 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14754 if (rc == OK) {
14755 char buf[FILENAME_MAX] = {};
14756 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14757 result->requestId, result->outputBuffers[0].streamId,
14758 outputConfig.image.width, outputConfig.image.height);
14759
14760 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14761 } else {
14762 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14763 __FUNCTION__, strerror(-rc), rc);
14764 }
14765 }
14766
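// Build an empty HAL metadata buffer keyed to the pic channel's stream ID so the
// translated framework metadata can be handed to the JPEG encoding path below.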
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014767 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14768 auto halMetadata = std::make_shared<metadata_buffer_t>();
14769 clear_metadata_buffer(halMetadata.get());
14770
14771 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14772 // encoding.
14773 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14774 halStreamId, /*minFrameDuration*/0);
14775 if (res == OK) {
14776 // Return the buffer to pic channel for encoding.
14777 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14778 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14779 halMetadata);
14780 } else {
14781 // Return the buffer without encoding.
14782 // TODO: This should not happen but we may want to report an error buffer to camera
14783 // service.
14784 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14785 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14786 strerror(-res), res);
14787 }
14788
14789 // Send HDR+ metadata to framework.
14790 {
14791 pthread_mutex_lock(&mMutex);
14792
14793 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14794 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14795 pthread_mutex_unlock(&mMutex);
14796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014797
14798 // Remove the HDR+ pending request.
14799 {
14800 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14801 auto req = mHdrPlusPendingRequests.find(result->requestId);
14802 mHdrPlusPendingRequests.erase(req);
14803 }
14804 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014805}
14806
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014807void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
14808{
14809 if (failedResult == nullptr) {
14810 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
14811 return;
14812 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014813
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014814 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014815
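// Recovery path for a failed HDR+ request: return the YUV buffer to the pic channel,
// report buffer errors for every framework buffer pending on this frame number, and
// drop the corresponding pending HAL request.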
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014816 // Remove the pending HDR+ request.
14817 {
14818 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14819 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
14820
14821 // Return the buffer to pic channel.
14822 QCamera3PicChannel *picChannel =
14823 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14824 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14825
14826 mHdrPlusPendingRequests.erase(pendingRequest);
14827 }
14828
14829 pthread_mutex_lock(&mMutex);
14830
14831 // Find the pending buffers.
14832 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
14833 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14834 if (pendingBuffers->frame_number == failedResult->requestId) {
14835 break;
14836 }
14837 pendingBuffers++;
14838 }
14839
14840 // Send out buffer errors for the pending buffers.
14841 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14842 std::vector<camera3_stream_buffer_t> streamBuffers;
14843 for (auto &buffer : pendingBuffers->mPendingBufferList) {
14844 // Prepare a stream buffer.
14845 camera3_stream_buffer_t streamBuffer = {};
14846 streamBuffer.stream = buffer.stream;
14847 streamBuffer.buffer = buffer.buffer;
14848 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14849 streamBuffer.acquire_fence = -1;
14850 streamBuffer.release_fence = -1;
14851
14852 streamBuffers.push_back(streamBuffer);
14853
14854 // Send out error buffer event.
14855 camera3_notify_msg_t notify_msg = {};
14856 notify_msg.type = CAMERA3_MSG_ERROR;
14857 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
14858 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
14859 notify_msg.message.error.error_stream = buffer.stream;
14860
14861 orchestrateNotify(&notify_msg);
14862 }
14863
14864 camera3_capture_result_t result = {};
14865 result.frame_number = pendingBuffers->frame_number;
14866 result.num_output_buffers = streamBuffers.size();
14867 result.output_buffers = &streamBuffers[0];
14868
14869 // Send out result with buffer errors.
14870 orchestrateResult(&result);
14871
14872 // Remove pending buffers.
14873 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
14874 }
14875
14876 // Remove pending request.
14877 auto halRequest = mPendingRequestsList.begin();
14878 while (halRequest != mPendingRequestsList.end()) {
14879 if (halRequest->frame_number == failedResult->requestId) {
14880 mPendingRequestsList.erase(halRequest);
14881 break;
14882 }
14883 halRequest++;
14884 }
14885
14886 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014887}
14888
Thierry Strudel3d639192016-09-09 11:52:26 -070014889}; //end namespace qcamera