/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Thresholds (in seconds) for detection of missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
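// Number of entries in a statically-sized mapping table.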
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS, CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS, CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

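// Supported thumbnail sizes, listed as flattened (width, height) pairs;
// the (0, 0) entry allows thumbnail generation to be disabled.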
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Some Android enums are not listed here because they have no corresponding HAL mapping.
 * The order of this list also matters: when mapping from HAL to Android the code traverses
 * from lower to higher index, so for HAL values that map to more than one Android value,
 * the first match found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

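// Log an Easel profiling event tagged with the current CLOCK_BOOTTIME timestamp
// (in milliseconds). Does nothing unless gEaselProfilingEnabled is set.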
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mHybridAeEnable(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gEaselManagerClient.isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient.resume();
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find input stream if it exists
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in configuration
     * Check if unsupported sizes have been requested on any of them
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format check against
         * appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce ZSL stream
                 * set from frameworks always is full active array size
                 * but it is not clear from the spec if framework will always
                 * follow that, also we have logic to override to full array
                 * size, so keeping the logic lenient at the moment
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateUsageFlags
 *
 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *   NO_ERROR if the usage flags are supported
 *   error code if usage flags are not supported
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateUsageFlags(
        const camera3_stream_configuration_t* streamList)
{
    for (size_t j = 0; j < streamList->num_streams; j++) {
        const camera3_stream_t *newStream = streamList->streams[j];

        if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
            (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
             newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
            continue;
        }

        bool isVideo = IS_USAGE_VIDEO(newStream->usage);
        bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
        bool isZSL = IS_USAGE_ZSL(newStream->usage);
        bool forcePreviewUBWC = true;
        if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
            forcePreviewUBWC = false;
        }
        cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
        cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
                CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);

        // Color space for this camera device is guaranteed to be ITU_R_601_FR.
        // So color spaces will always match.

        // Check whether underlying formats of shared streams match.
        if (isVideo && isPreview && videoFormat != previewFormat) {
            LOGE("Combined video and preview usage flag is not supported");
            return -EINVAL;
        }
        if (isPreview && isZSL && previewFormat != zslFormat) {
            LOGE("Combined preview and zsl usage flag is not supported");
            return -EINVAL;
        }
        if (isVideo && isZSL && videoFormat != zslFormat) {
            LOGE("Combined video and zsl usage flag is not supported");
            return -EINVAL;
        }
    }
    return NO_ERROR;
}

1358/*===========================================================================
1359 * FUNCTION : validateUsageFlagsForEis
1360 *
1361 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1362 *
1363 * PARAMETERS :
1364 * @stream_list : streams to be configured
1365 *
1366 * RETURN :
1367 * NO_ERROR if the usage flags are supported
1368 * error code if usage flags are not supported
1369 *
1370 *==========================================================================*/
1371int QCamera3HardwareInterface::validateUsageFlagsForEis(
1372 const camera3_stream_configuration_t* streamList)
1373{
1374 for (size_t j = 0; j < streamList->num_streams; j++) {
1375 const camera3_stream_t *newStream = streamList->streams[j];
1376
1377 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1378 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1379
1380 // Because EIS is "hard-coded" for certain use cases, and the current
1381 // implementation doesn't support shared preview and video on the same
1382 // stream, return failure if EIS is forced on.
1383 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1384 LOGE("Combined video and preview usage flag is not supported due to EIS");
1385 return -EINVAL;
1386 }
1387 }
1388 return NO_ERROR;
1389}
1390
Thierry Strudel3d639192016-09-09 11:52:26 -07001391/*==============================================================================
1392 * FUNCTION : isSupportChannelNeeded
1393 *
1394 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1395 *
1396 * PARAMETERS :
1397 * @stream_list : streams to be configured
1398 * @stream_config_info : the config info for streams to be configured
1399 *
1400 * RETURN : Boolean true/false decision
1401 *
1402 *==========================================================================*/
1403bool QCamera3HardwareInterface::isSupportChannelNeeded(
1404 camera3_stream_configuration_t *streamList,
1405 cam_stream_size_info_t stream_config_info)
1406{
1407 uint32_t i;
1408 bool pprocRequested = false;
1409 /* Check for conditions where PProc pipeline does not have any streams */
1410 for (i = 0; i < stream_config_info.num_streams; i++) {
1411 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1412 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1413 pprocRequested = true;
1414 break;
1415 }
1416 }
1417
1418 if (pprocRequested == false )
1419 return true;
1420
1421 /* Dummy stream needed if only raw or jpeg streams present */
1422 for (i = 0; i < streamList->num_streams; i++) {
1423 switch(streamList->streams[i]->format) {
1424 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1425 case HAL_PIXEL_FORMAT_RAW10:
1426 case HAL_PIXEL_FORMAT_RAW16:
1427 case HAL_PIXEL_FORMAT_BLOB:
1428 break;
1429 default:
1430 return false;
1431 }
1432 }
1433 return true;
1434}
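/* Illustrative examples (hypothetical configurations): a stream list with
 * only a RAW16 stream and a BLOB (JPEG) stream hits the raw/jpeg-only loop
 * above and returns true, so a dummy support stream is added to keep the
 * pipeline fed; a list that also contains a preview or YUV callback stream
 * returns false. The earlier pprocRequested check likewise returns true when
 * no non-analysis stream carries a postprocess feature mask. */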
1435
1436/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001437 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001438 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001439 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001440 *
1441 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001442 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001443 *
1444 * RETURN : int32_t type of status
1445 * NO_ERROR -- success
1446 * non-zero failure code
1447 *
1448 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001449int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001450{
1451 int32_t rc = NO_ERROR;
1452
1453 cam_dimension_t max_dim = {0, 0};
1454 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1455 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1456 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1457 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1458 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1459 }
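    // Worked example (sizes hypothetical): with a 1920x1080 preview stream and
    // a 4032x3024 snapshot stream configured, max_dim becomes 4032x3024. The
    // maximum is taken per axis, so a mixed configuration can yield a
    // dimension that matches no single stream (e.g. 3840x2160 video plus
    // 3264x2448 JPEG gives 3840x2448).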
1460
1461 clear_metadata_buffer(mParameters);
1462
1463 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1464 max_dim);
1465 if (rc != NO_ERROR) {
1466 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1467 return rc;
1468 }
1469
1470 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1471 if (rc != NO_ERROR) {
1472 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1473 return rc;
1474 }
1475
1476 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001477 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001478
1479 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1480 mParameters);
1481 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001482 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001483 return rc;
1484 }
1485
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001486 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001487 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1488 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1489 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1490 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1491 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001492
1493 return rc;
1494}
1495
1496/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001497 * FUNCTION : addToPPFeatureMask
1498 *
1499 * DESCRIPTION: add additional features to pp feature mask based on
1500 * stream type and use case
1501 *
1502 * PARAMETERS :
1503 * @stream_format : stream format for feature mask
1504 * @stream_idx : stream idx within postprocess_mask list to change
1505 *
1506 * RETURN : None
1507 *
1508 *==========================================================================*/
1509void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1510 uint32_t stream_idx)
1511{
1512 char feature_mask_value[PROPERTY_VALUE_MAX];
1513 cam_feature_mask_t feature_mask;
1514 int args_converted;
1515 int property_len;
1516
1517 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001518#ifdef _LE_CAMERA_
1519 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1520 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1521 property_len = property_get("persist.camera.hal3.feature",
1522 feature_mask_value, swtnr_feature_mask_value);
1523#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001524 property_len = property_get("persist.camera.hal3.feature",
1525 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001526#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001527 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1528 (feature_mask_value[1] == 'x')) {
1529 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1530 } else {
1531 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1532 }
1533 if (1 != args_converted) {
1534 feature_mask = 0;
1535 LOGE("Wrong feature mask %s", feature_mask_value);
1536 return;
1537 }
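    /* Example (bit values hypothetical): the property accepts hex or decimal,
     * so "adb shell setprop persist.camera.hal3.feature 0x4000" and
     * "... 16384" parse to the same mask. Which bit maps to
     * CAM_QTI_FEATURE_SW_TNR or CAM_QCOM_FEATURE_LLVD depends on the
     * cam_feature_mask_t definitions in the camera interface headers. */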
1538
1539 switch (stream_format) {
1540 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1541 /* Add LLVD to pp feature mask only if video hint is enabled */
1542 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1543 mStreamConfigInfo.postprocess_mask[stream_idx]
1544 |= CAM_QTI_FEATURE_SW_TNR;
1545 LOGH("Added SW TNR to pp feature mask");
1546 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1547 mStreamConfigInfo.postprocess_mask[stream_idx]
1548 |= CAM_QCOM_FEATURE_LLVD;
1549 LOGH("Added LLVD SeeMore to pp feature mask");
1550 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001551 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1552 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1553 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1554 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001555 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1556 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1557 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1558 CAM_QTI_FEATURE_BINNING_CORRECTION;
1559 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001560 break;
1561 }
1562 default:
1563 break;
1564 }
1565 LOGD("PP feature mask %llx",
1566 mStreamConfigInfo.postprocess_mask[stream_idx]);
1567}
1568
1569/*==============================================================================
1570 * FUNCTION : updateFpsInPreviewBuffer
1571 *
1572 * DESCRIPTION: update FPS information in preview buffer.
1573 *
1574 * PARAMETERS :
1575 * @metadata : pointer to metadata buffer
1576 * @frame_number: frame_number to look for in pending buffer list
1577 *
1578 * RETURN : None
1579 *
1580 *==========================================================================*/
1581void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1582 uint32_t frame_number)
1583{
1584 // Mark all pending buffers for this particular request
1585 // with corresponding framerate information
1586 for (List<PendingBuffersInRequest>::iterator req =
1587 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1588 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1589 for(List<PendingBufferInfo>::iterator j =
1590 req->mPendingBufferList.begin();
1591 j != req->mPendingBufferList.end(); j++) {
1592 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1593 if ((req->frame_number == frame_number) &&
1594 (channel->getStreamTypeMask() &
1595 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1596 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1597 CAM_INTF_PARM_FPS_RANGE, metadata) {
1598 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1599 struct private_handle_t *priv_handle =
1600 (struct private_handle_t *)(*(j->buffer));
1601 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1602 }
1603 }
1604 }
1605 }
1606}
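/* Example of the effect (display-side behaviour assumed): if the request's
 * metadata reports an FPS range of [30, 30], every pending preview buffer of
 * that frame number gets refreshrate = 30 written into its private handle via
 * setMetaData(UPDATE_REFRESH_RATE, ...), which the display pipeline can use
 * to pick a matching panel refresh rate. */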
1607
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001608/*==============================================================================
1609 * FUNCTION : updateTimeStampInPendingBuffers
1610 *
1611 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1612 * of a frame number
1613 *
1614 * PARAMETERS :
1615 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1616 * @timestamp : timestamp to be set
1617 *
1618 * RETURN : None
1619 *
1620 *==========================================================================*/
1621void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1622 uint32_t frameNumber, nsecs_t timestamp)
1623{
1624 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1625 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1626 if (req->frame_number != frameNumber)
1627 continue;
1628
1629 for (auto k = req->mPendingBufferList.begin();
1630 k != req->mPendingBufferList.end(); k++ ) {
1631 struct private_handle_t *priv_handle =
1632 (struct private_handle_t *) (*(k->buffer));
1633 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1634 }
1635 }
1636 return;
1637}
1638
Thierry Strudel3d639192016-09-09 11:52:26 -07001639/*===========================================================================
1640 * FUNCTION : configureStreams
1641 *
1642 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1643 * and output streams.
1644 *
1645 * PARAMETERS :
1646 * @stream_list : streams to be configured
1647 *
1648 * RETURN :
1649 *
1650 *==========================================================================*/
1651int QCamera3HardwareInterface::configureStreams(
1652 camera3_stream_configuration_t *streamList)
1653{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001654 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001655 int rc = 0;
1656
1657 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001658 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001659 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001660 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001661
1662 return rc;
1663}
1664
1665/*===========================================================================
1666 * FUNCTION : configureStreamsPerfLocked
1667 *
1668 * DESCRIPTION: configureStreams while perfLock is held.
1669 *
1670 * PARAMETERS :
1671 * @stream_list : streams to be configured
1672 *
1673 * RETURN : int32_t type of status
1674 * NO_ERROR -- success
1675 * non-zero failure code
1676 *==========================================================================*/
1677int QCamera3HardwareInterface::configureStreamsPerfLocked(
1678 camera3_stream_configuration_t *streamList)
1679{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001680 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001681 int rc = 0;
1682
1683 // Sanity check stream_list
1684 if (streamList == NULL) {
1685 LOGE("NULL stream configuration");
1686 return BAD_VALUE;
1687 }
1688 if (streamList->streams == NULL) {
1689 LOGE("NULL stream list");
1690 return BAD_VALUE;
1691 }
1692
1693 if (streamList->num_streams < 1) {
1694 LOGE("Bad number of streams requested: %d",
1695 streamList->num_streams);
1696 return BAD_VALUE;
1697 }
1698
1699 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1700 LOGE("Maximum number of streams %d exceeded: %d",
1701 MAX_NUM_STREAMS, streamList->num_streams);
1702 return BAD_VALUE;
1703 }
1704
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001705 rc = validateUsageFlags(streamList);
1706 if (rc != NO_ERROR) {
1707 return rc;
1708 }
1709
Thierry Strudel3d639192016-09-09 11:52:26 -07001710 mOpMode = streamList->operation_mode;
1711 LOGD("mOpMode: %d", mOpMode);
1712
1713 /* first invalidate all the streams in mStreamInfo;
1714 * if they appear again, they will be validated */
1715 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1716 it != mStreamInfo.end(); it++) {
1717 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1718 if (channel) {
1719 channel->stop();
1720 }
1721 (*it)->status = INVALID;
1722 }
1723
1724 if (mRawDumpChannel) {
1725 mRawDumpChannel->stop();
1726 delete mRawDumpChannel;
1727 mRawDumpChannel = NULL;
1728 }
1729
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001730 if (mHdrPlusRawSrcChannel) {
1731 mHdrPlusRawSrcChannel->stop();
1732 delete mHdrPlusRawSrcChannel;
1733 mHdrPlusRawSrcChannel = NULL;
1734 }
1735
Thierry Strudel3d639192016-09-09 11:52:26 -07001736 if (mSupportChannel)
1737 mSupportChannel->stop();
1738
1739 if (mAnalysisChannel) {
1740 mAnalysisChannel->stop();
1741 }
1742 if (mMetadataChannel) {
1743 /* If mStreamInfo is not empty, there is a metadata stream */
1744 mMetadataChannel->stop();
1745 }
1746 if (mChannelHandle) {
1747 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1748 mChannelHandle);
1749 LOGD("stopping channel %d", mChannelHandle);
1750 }
1751
1752 pthread_mutex_lock(&mMutex);
1753
1754 // Check state
1755 switch (mState) {
1756 case INITIALIZED:
1757 case CONFIGURED:
1758 case STARTED:
1759 /* valid state */
1760 break;
1761 default:
1762 LOGE("Invalid state %d", mState);
1763 pthread_mutex_unlock(&mMutex);
1764 return -ENODEV;
1765 }
1766
1767 /* Check whether we have video stream */
1768 m_bIs4KVideo = false;
1769 m_bIsVideo = false;
1770 m_bEisSupportedSize = false;
1771 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001772 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001773 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001774 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001775 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001776 uint32_t videoWidth = 0U;
1777 uint32_t videoHeight = 0U;
1778 size_t rawStreamCnt = 0;
1779 size_t stallStreamCnt = 0;
1780 size_t processedStreamCnt = 0;
1781 // Number of streams on ISP encoder path
1782 size_t numStreamsOnEncoder = 0;
1783 size_t numYuv888OnEncoder = 0;
1784 bool bYuv888OverrideJpeg = false;
1785 cam_dimension_t largeYuv888Size = {0, 0};
1786 cam_dimension_t maxViewfinderSize = {0, 0};
1787 bool bJpegExceeds4K = false;
1788 bool bJpegOnEncoder = false;
1789 bool bUseCommonFeatureMask = false;
1790 cam_feature_mask_t commonFeatureMask = 0;
1791 bool bSmallJpegSize = false;
1792 uint32_t width_ratio;
1793 uint32_t height_ratio;
1794 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1795 camera3_stream_t *inputStream = NULL;
1796 bool isJpeg = false;
1797 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001798 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001799 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001800
1801 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1802
1803 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001804 uint8_t eis_prop_set;
1805 uint32_t maxEisWidth = 0;
1806 uint32_t maxEisHeight = 0;
1807
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001808 // Initialize all instant AEC related variables
1809 mInstantAEC = false;
1810 mResetInstantAEC = false;
1811 mInstantAECSettledFrameNumber = 0;
1812 mAecSkipDisplayFrameBound = 0;
1813 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001814 mCurrFeatureState = 0;
1815 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001816
Thierry Strudel3d639192016-09-09 11:52:26 -07001817 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1818
1819 size_t count = IS_TYPE_MAX;
1820 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1821 for (size_t i = 0; i < count; i++) {
1822 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001823 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1824 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001825 break;
1826 }
1827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001828
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001829 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001830 maxEisWidth = MAX_EIS_WIDTH;
1831 maxEisHeight = MAX_EIS_HEIGHT;
1832 }
1833
1834 /* EIS setprop control */
1835 char eis_prop[PROPERTY_VALUE_MAX];
1836 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001837 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001838 eis_prop_set = (uint8_t)atoi(eis_prop);
1839
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001840 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001841 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1842
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001843 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1844 m_bEisEnable, eis_prop_set, m_bEisSupported);
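    /* Net effect of the EIS controls above and below (persist.camera.eis.enable
     * defaults to "1"): m_bEisEnable starts true only when the property is set,
     * the sensor advertises IS_TYPE_EIS_2_0/3_0 and the session is not
     * constrained high-speed; it is cleared further below for front/front-aux
     * cameras and for configurations without a video stream. */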
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001845
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 /* stream configurations */
1847 for (size_t i = 0; i < streamList->num_streams; i++) {
1848 camera3_stream_t *newStream = streamList->streams[i];
1849 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1850 "height = %d, rotation = %d, usage = 0x%x",
1851 i, newStream->stream_type, newStream->format,
1852 newStream->width, newStream->height, newStream->rotation,
1853 newStream->usage);
1854 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1855 newStream->stream_type == CAMERA3_STREAM_INPUT){
1856 isZsl = true;
1857 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001858 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1859 IS_USAGE_PREVIEW(newStream->usage)) {
1860 isPreview = true;
1861 }
1862
Thierry Strudel3d639192016-09-09 11:52:26 -07001863 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1864 inputStream = newStream;
1865 }
1866
Emilian Peev7650c122017-01-19 08:24:33 -08001867 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1868 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001869 isJpeg = true;
1870 jpegSize.width = newStream->width;
1871 jpegSize.height = newStream->height;
1872 if (newStream->width > VIDEO_4K_WIDTH ||
1873 newStream->height > VIDEO_4K_HEIGHT)
1874 bJpegExceeds4K = true;
1875 }
1876
1877 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1878 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1879 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001880 // In HAL3 we can have multiple different video streams.
1881 // The videoWidth/videoHeight variables below hold the dimensions
1882 // of the largest of them
1883 if (videoWidth < newStream->width ||
1884 videoHeight < newStream->height) {
1885 videoWidth = newStream->width;
1886 videoHeight = newStream->height;
1887 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001888 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1889 (VIDEO_4K_HEIGHT <= newStream->height)) {
1890 m_bIs4KVideo = true;
1891 }
1892 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1893 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001894
Thierry Strudel3d639192016-09-09 11:52:26 -07001895 }
1896 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1897 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1898 switch (newStream->format) {
1899 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001900 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1901 depthPresent = true;
1902 break;
1903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001904 stallStreamCnt++;
1905 if (isOnEncoder(maxViewfinderSize, newStream->width,
1906 newStream->height)) {
1907 numStreamsOnEncoder++;
1908 bJpegOnEncoder = true;
1909 }
1910 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1911 newStream->width);
1912 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1913 newStream->height);
1914 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1915 "FATAL: max_downscale_factor cannot be zero and so assert");
1916 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1917 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1918 LOGH("Setting small jpeg size flag to true");
1919 bSmallJpegSize = true;
1920 }
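                /* Worked example (numbers hypothetical): with a 4000x3000 active
                 * array, a 320x240 JPEG gives width_ratio = ceil(4000/320) = 13
                 * and height_ratio = ceil(3000/240) = 13; if max_downscale_factor
                 * were 8, the single-pass downscale limit is exceeded, so
                 * bSmallJpegSize is set and the snapshot stream later picks up
                 * the PP superset feature mask. */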
1921 break;
1922 case HAL_PIXEL_FORMAT_RAW10:
1923 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1924 case HAL_PIXEL_FORMAT_RAW16:
1925 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001926 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1927 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1928 pdStatCount++;
1929 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001930 break;
1931 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1932 processedStreamCnt++;
1933 if (isOnEncoder(maxViewfinderSize, newStream->width,
1934 newStream->height)) {
1935 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1936 !IS_USAGE_ZSL(newStream->usage)) {
1937 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1938 }
1939 numStreamsOnEncoder++;
1940 }
1941 break;
1942 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1943 processedStreamCnt++;
1944 if (isOnEncoder(maxViewfinderSize, newStream->width,
1945 newStream->height)) {
1946 // If Yuv888 size is not greater than 4K, set feature mask
1947 // to SUPERSET so that it support concurrent request on
1948 // YUV and JPEG.
1949 if (newStream->width <= VIDEO_4K_WIDTH &&
1950 newStream->height <= VIDEO_4K_HEIGHT) {
1951 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1952 }
1953 numStreamsOnEncoder++;
1954 numYuv888OnEncoder++;
1955 largeYuv888Size.width = newStream->width;
1956 largeYuv888Size.height = newStream->height;
1957 }
1958 break;
1959 default:
1960 processedStreamCnt++;
1961 if (isOnEncoder(maxViewfinderSize, newStream->width,
1962 newStream->height)) {
1963 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1964 numStreamsOnEncoder++;
1965 }
1966 break;
1967 }
1968
1969 }
1970 }
1971
1972 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1973 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1974 !m_bIsVideo) {
1975 m_bEisEnable = false;
1976 }
1977
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001978 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1979 pthread_mutex_unlock(&mMutex);
1980 return -EINVAL;
1981 }
1982
Thierry Strudel54dc9782017-02-15 12:12:10 -08001983 uint8_t forceEnableTnr = 0;
1984 char tnr_prop[PROPERTY_VALUE_MAX];
1985 memset(tnr_prop, 0, sizeof(tnr_prop));
1986 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1987 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1988
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 /* Logic to enable/disable TNR based on specific config size/etc.*/
1990 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001991 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1992 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001993 else if (forceEnableTnr)
1994 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001995
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001996 char videoHdrProp[PROPERTY_VALUE_MAX];
1997 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1998 property_get("persist.camera.hdr.video", videoHdrProp, "0");
1999 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2000
2001 if (hdr_mode_prop == 1 && m_bIsVideo &&
2002 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2003 m_bVideoHdrEnabled = true;
2004 else
2005 m_bVideoHdrEnabled = false;
2006
2007
Thierry Strudel3d639192016-09-09 11:52:26 -07002008 /* Check if num_streams is sane */
2009 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2010 rawStreamCnt > MAX_RAW_STREAMS ||
2011 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2012 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
2013 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2014 pthread_mutex_unlock(&mMutex);
2015 return -EINVAL;
2016 }
2017 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002018 if (isZsl && m_bIs4KVideo) {
2019 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002020 pthread_mutex_unlock(&mMutex);
2021 return -EINVAL;
2022 }
2023 /* Check if stream sizes are sane */
2024 if (numStreamsOnEncoder > 2) {
2025 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2026 pthread_mutex_unlock(&mMutex);
2027 return -EINVAL;
2028 } else if (1 < numStreamsOnEncoder){
2029 bUseCommonFeatureMask = true;
2030 LOGH("Multiple streams above max viewfinder size, common mask needed");
2031 }
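    /* Example: a 4K YUV_420_888 callback stream plus a 4K JPEG both exceed the
     * max viewfinder size, so numStreamsOnEncoder == 2, bUseCommonFeatureMask
     * is set, and the CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 bits collected in the
     * loop above are applied to those encoder-path streams further below. */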
2032
2033 /* Check if BLOB size is greater than 4k in 4k recording case */
2034 if (m_bIs4KVideo && bJpegExceeds4K) {
2035 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2036 pthread_mutex_unlock(&mMutex);
2037 return -EINVAL;
2038 }
2039
Emilian Peev7650c122017-01-19 08:24:33 -08002040 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2041 depthPresent) {
2042 LOGE("HAL doesn't support depth streams in HFR mode!");
2043 pthread_mutex_unlock(&mMutex);
2044 return -EINVAL;
2045 }
2046
Thierry Strudel3d639192016-09-09 11:52:26 -07002047 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2048 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2049 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2050 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2051 // configurations:
2052 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2053 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2054 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2055 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2056 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2057 __func__);
2058 pthread_mutex_unlock(&mMutex);
2059 return -EINVAL;
2060 }
2061
2062 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2063 // the YUV stream's size is greater or equal to the JPEG size, set common
2064 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2065 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2066 jpegSize.width, jpegSize.height) &&
2067 largeYuv888Size.width > jpegSize.width &&
2068 largeYuv888Size.height > jpegSize.height) {
2069 bYuv888OverrideJpeg = true;
2070 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2071 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 }
2073
2074 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2075 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2076 commonFeatureMask);
2077 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2078 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2079
2080 rc = validateStreamDimensions(streamList);
2081 if (rc == NO_ERROR) {
2082 rc = validateStreamRotations(streamList);
2083 }
2084 if (rc != NO_ERROR) {
2085 LOGE("Invalid stream configuration requested!");
2086 pthread_mutex_unlock(&mMutex);
2087 return rc;
2088 }
2089
Emilian Peev0f3c3162017-03-15 12:57:46 +00002090 if (1 < pdStatCount) {
2091 LOGE("HAL doesn't support multiple PD streams");
2092 pthread_mutex_unlock(&mMutex);
2093 return -EINVAL;
2094 }
2095
2096 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2097 (1 == pdStatCount)) {
2098 LOGE("HAL doesn't support PD streams in HFR mode!");
2099 pthread_mutex_unlock(&mMutex);
2100 return -EINVAL;
2101 }
2102
Thierry Strudel3d639192016-09-09 11:52:26 -07002103 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2104 for (size_t i = 0; i < streamList->num_streams; i++) {
2105 camera3_stream_t *newStream = streamList->streams[i];
2106 LOGH("newStream type = %d, stream format = %d "
2107 "stream size : %d x %d, stream rotation = %d",
2108 newStream->stream_type, newStream->format,
2109 newStream->width, newStream->height, newStream->rotation);
2110 //if the stream is already in mStreamInfo, validate it
2111 bool stream_exists = false;
2112 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2113 it != mStreamInfo.end(); it++) {
2114 if ((*it)->stream == newStream) {
2115 QCamera3ProcessingChannel *channel =
2116 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2117 stream_exists = true;
2118 if (channel)
2119 delete channel;
2120 (*it)->status = VALID;
2121 (*it)->stream->priv = NULL;
2122 (*it)->channel = NULL;
2123 }
2124 }
2125 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2126 //new stream
2127 stream_info_t* stream_info;
2128 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2129 if (!stream_info) {
2130 LOGE("Could not allocate stream info");
2131 rc = -ENOMEM;
2132 pthread_mutex_unlock(&mMutex);
2133 return rc;
2134 }
2135 stream_info->stream = newStream;
2136 stream_info->status = VALID;
2137 stream_info->channel = NULL;
2138 mStreamInfo.push_back(stream_info);
2139 }
2140 /* Covers Opaque ZSL and API1 F/W ZSL */
2141 if (IS_USAGE_ZSL(newStream->usage)
2142 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2143 if (zslStream != NULL) {
2144 LOGE("Multiple input/reprocess streams requested!");
2145 pthread_mutex_unlock(&mMutex);
2146 return BAD_VALUE;
2147 }
2148 zslStream = newStream;
2149 }
2150 /* Covers YUV reprocess */
2151 if (inputStream != NULL) {
2152 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2153 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2154 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->width == newStream->width
2156 && inputStream->height == newStream->height) {
2157 if (zslStream != NULL) {
2158 /* This scenario indicates that multiple YUV streams with the same size
2159 * as the input stream have been requested. Since the zsl stream handle
2160 * is used solely to override the size of streams that share h/w
2161 * streams, we just make a guess here as to which of the streams is
2162 * the ZSL stream. This will be refactored once there is generic logic
2163 * for streams sharing encoder output.
2164 */
2165 LOGH("Warning, Multiple ip/reprocess streams requested!");
2166 }
2167 zslStream = newStream;
2168 }
2169 }
2170 }
2171
2172 /* If a zsl stream is set, we know that we have configured at least one input or
2173 bidirectional stream */
2174 if (NULL != zslStream) {
2175 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2176 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2177 mInputStreamInfo.format = zslStream->format;
2178 mInputStreamInfo.usage = zslStream->usage;
2179 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2180 mInputStreamInfo.dim.width,
2181 mInputStreamInfo.dim.height,
2182 mInputStreamInfo.format, mInputStreamInfo.usage);
2183 }
2184
2185 cleanAndSortStreamInfo();
2186 if (mMetadataChannel) {
2187 delete mMetadataChannel;
2188 mMetadataChannel = NULL;
2189 }
2190 if (mSupportChannel) {
2191 delete mSupportChannel;
2192 mSupportChannel = NULL;
2193 }
2194
2195 if (mAnalysisChannel) {
2196 delete mAnalysisChannel;
2197 mAnalysisChannel = NULL;
2198 }
2199
2200 if (mDummyBatchChannel) {
2201 delete mDummyBatchChannel;
2202 mDummyBatchChannel = NULL;
2203 }
2204
Emilian Peev7650c122017-01-19 08:24:33 -08002205 if (mDepthChannel) {
2206 mDepthChannel = NULL;
2207 }
2208
Thierry Strudel2896d122017-02-23 19:18:03 -08002209 char is_type_value[PROPERTY_VALUE_MAX];
2210 property_get("persist.camera.is_type", is_type_value, "4");
2211 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2212
Binhao Line406f062017-05-03 14:39:44 -07002213 char property_value[PROPERTY_VALUE_MAX];
2214 property_get("persist.camera.gzoom.at", property_value, "0");
2215 int goog_zoom_at = atoi(property_value);
2216 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0);
2217 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0);
2218
2219 property_get("persist.camera.gzoom.4k", property_value, "0");
2220 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
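    // persist.camera.gzoom.at is treated as a bitmask: bit 0 enables Google
    // zoom on the video stream, bit 1 on preview streams, so e.g. "3" enables
    // both. persist.camera.gzoom.4k additionally gates the video case: for 4K
    // video the feature is only added when that property is non-zero.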
2221
Thierry Strudel3d639192016-09-09 11:52:26 -07002222 //Create metadata channel and initialize it
2223 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2224 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2225 gCamCapability[mCameraId]->color_arrangement);
2226 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2227 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002228 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002229 if (mMetadataChannel == NULL) {
2230 LOGE("failed to allocate metadata channel");
2231 rc = -ENOMEM;
2232 pthread_mutex_unlock(&mMutex);
2233 return rc;
2234 }
2235 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2236 if (rc < 0) {
2237 LOGE("metadata channel initialization failed");
2238 delete mMetadataChannel;
2239 mMetadataChannel = NULL;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243
Thierry Strudel2896d122017-02-23 19:18:03 -08002244 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002245 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002246 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002247 // Keep track of preview/video streams indices.
2248 // There could be more than one preview streams, but only one video stream.
2249 int32_t video_stream_idx = -1;
2250 int32_t preview_stream_idx[streamList->num_streams];
2251 size_t preview_stream_cnt = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07002252 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2253 /* Allocate channel objects for the requested streams */
2254 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002255
Thierry Strudel3d639192016-09-09 11:52:26 -07002256 camera3_stream_t *newStream = streamList->streams[i];
2257 uint32_t stream_usage = newStream->usage;
2258 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2259 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2260 struct camera_info *p_info = NULL;
2261 pthread_mutex_lock(&gCamLock);
2262 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2263 pthread_mutex_unlock(&gCamLock);
2264 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2265 || IS_USAGE_ZSL(newStream->usage)) &&
2266 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002267 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002268 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002269 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2270 if (bUseCommonFeatureMask)
2271 zsl_ppmask = commonFeatureMask;
2272 else
2273 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002274 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002275 if (numStreamsOnEncoder > 0)
2276 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2277 else
2278 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002279 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002281 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002282 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002283 LOGH("Input stream configured, reprocess config");
2284 } else {
2285 //for non zsl streams find out the format
2286 switch (newStream->format) {
2287 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2288 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002289 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002290 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2291 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2292 /* add additional features to pp feature mask */
2293 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2294 mStreamConfigInfo.num_streams);
2295
2296 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2297 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2298 CAM_STREAM_TYPE_VIDEO;
2299 if (m_bTnrEnabled && m_bTnrVideo) {
2300 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2301 CAM_QCOM_FEATURE_CPP_TNR;
2302 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2304 ~CAM_QCOM_FEATURE_CDS;
2305 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002306 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2308 CAM_QTI_FEATURE_PPEISCORE;
2309 }
Binhao Line406f062017-05-03 14:39:44 -07002310 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2311 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2312 CAM_QCOM_FEATURE_GOOG_ZOOM;
2313 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002314 video_stream_idx = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002315 } else {
2316 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2317 CAM_STREAM_TYPE_PREVIEW;
2318 if (m_bTnrEnabled && m_bTnrPreview) {
2319 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2320 CAM_QCOM_FEATURE_CPP_TNR;
2321 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2322 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2323 ~CAM_QCOM_FEATURE_CDS;
2324 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002325 if(!m_bSwTnrPreview) {
2326 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2327 ~CAM_QTI_FEATURE_SW_TNR;
2328 }
Binhao Line406f062017-05-03 14:39:44 -07002329 if (is_goog_zoom_preview_enabled) {
2330 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2331 CAM_QCOM_FEATURE_GOOG_ZOOM;
2332 }
Binhao Lincdb362a2017-04-20 13:31:54 -07002333 preview_stream_idx[preview_stream_cnt++] = mStreamConfigInfo.num_streams;
Thierry Strudel3d639192016-09-09 11:52:26 -07002334 padding_info.width_padding = mSurfaceStridePadding;
2335 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002336 previewSize.width = (int32_t)newStream->width;
2337 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002338 }
2339 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2340 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2341 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2342 newStream->height;
2343 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2344 newStream->width;
2345 }
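                /* Example: a 1080x1920 portrait output stream declared with
                 * CAMERA3_STREAM_ROTATION_90 is programmed here as 1920x1080
                 * toward the backend; the rotation itself is presumed to be
                 * applied later in post-processing rather than at the sensor. */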
2346 }
2347 break;
2348 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002349 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002350 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2351 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2352 if (bUseCommonFeatureMask)
2353 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2354 commonFeatureMask;
2355 else
2356 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2357 CAM_QCOM_FEATURE_NONE;
2358 } else {
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2360 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2361 }
2362 break;
2363 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002364 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002365 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2366 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2367 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2369 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002370 /* Remove rotation if it is not supported
2371 for 4K LiveVideo snapshot case (online processing) */
2372 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2373 CAM_QCOM_FEATURE_ROTATION)) {
2374 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2375 &= ~CAM_QCOM_FEATURE_ROTATION;
2376 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002377 } else {
2378 if (bUseCommonFeatureMask &&
2379 isOnEncoder(maxViewfinderSize, newStream->width,
2380 newStream->height)) {
2381 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2382 } else {
2383 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2384 }
2385 }
2386 if (isZsl) {
2387 if (zslStream) {
2388 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2389 (int32_t)zslStream->width;
2390 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2391 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002392 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2393 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002394 } else {
2395 LOGE("Error, No ZSL stream identified");
2396 pthread_mutex_unlock(&mMutex);
2397 return -EINVAL;
2398 }
2399 } else if (m_bIs4KVideo) {
2400 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2401 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2402 } else if (bYuv888OverrideJpeg) {
2403 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2404 (int32_t)largeYuv888Size.width;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2406 (int32_t)largeYuv888Size.height;
2407 }
2408 break;
2409 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2410 case HAL_PIXEL_FORMAT_RAW16:
2411 case HAL_PIXEL_FORMAT_RAW10:
2412 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2413 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2414 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002415 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2416 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2417 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2418 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2419 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2420 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2421 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2422 gCamCapability[mCameraId]->dt[mPDIndex];
2423 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2424 gCamCapability[mCameraId]->vc[mPDIndex];
2425 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 break;
2427 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002428 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002429 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2430 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2431 break;
2432 }
2433 }
2434
2435 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2436 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2437 gCamCapability[mCameraId]->color_arrangement);
2438
2439 if (newStream->priv == NULL) {
2440 //New stream, construct channel
2441 switch (newStream->stream_type) {
2442 case CAMERA3_STREAM_INPUT:
2443 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2444 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2445 break;
2446 case CAMERA3_STREAM_BIDIRECTIONAL:
2447 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2448 GRALLOC_USAGE_HW_CAMERA_WRITE;
2449 break;
2450 case CAMERA3_STREAM_OUTPUT:
2451 /* For video encoding streams, set the read/write rarely
2452 * flags so that the buffers may be allocated un-cached */
2453 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2454 newStream->usage |=
2455 (GRALLOC_USAGE_SW_READ_RARELY |
2456 GRALLOC_USAGE_SW_WRITE_RARELY |
2457 GRALLOC_USAGE_HW_CAMERA_WRITE);
2458 else if (IS_USAGE_ZSL(newStream->usage))
2459 {
2460 LOGD("ZSL usage flag skipping");
2461 }
2462 else if (newStream == zslStream
2463 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2464 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2465 } else
2466 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2467 break;
2468 default:
2469 LOGE("Invalid stream_type %d", newStream->stream_type);
2470 break;
2471 }
2472
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002473 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002474 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2475 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2476 QCamera3ProcessingChannel *channel = NULL;
2477 switch (newStream->format) {
2478 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2479 if ((newStream->usage &
2480 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2481 (streamList->operation_mode ==
2482 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2483 ) {
2484 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2485 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002486 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002487 this,
2488 newStream,
2489 (cam_stream_type_t)
2490 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2491 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2492 mMetadataChannel,
2493 0); //heap buffers are not required for HFR video channel
2494 if (channel == NULL) {
2495 LOGE("allocation of channel failed");
2496 pthread_mutex_unlock(&mMutex);
2497 return -ENOMEM;
2498 }
2499 //channel->getNumBuffers() will return 0 here so use
2500 //MAX_INFLIGHT_HFR_REQUESTS
2501 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2502 newStream->priv = channel;
2503 LOGI("num video buffers in HFR mode: %d",
2504 MAX_INFLIGHT_HFR_REQUESTS);
2505 } else {
2506 /* Copy stream contents in the HFR preview-only case to create
2507 * a dummy batch channel so that sensor streaming is in
2508 * HFR mode */
2509 if (!m_bIsVideo && (streamList->operation_mode ==
2510 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2511 mDummyBatchStream = *newStream;
2512 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 int bufferCount = MAX_INFLIGHT_REQUESTS;
2514 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2515 CAM_STREAM_TYPE_VIDEO) {
2516 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2517 bufferCount = MAX_VIDEO_BUFFERS;
2518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002519 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2520 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002521 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 this,
2523 newStream,
2524 (cam_stream_type_t)
2525 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2527 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002528 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002529 if (channel == NULL) {
2530 LOGE("allocation of channel failed");
2531 pthread_mutex_unlock(&mMutex);
2532 return -ENOMEM;
2533 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002534 /* disable UBWC for preview, though supported,
2535 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002536 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 (previewSize.width == (int32_t)videoWidth)&&
2538 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002539 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002540 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002541 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002542 /* When goog_zoom is linked to the preview or video stream,
2543 * disable UBWC for the linked stream */
2544 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2545 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2546 channel->setUBWCEnabled(false);
2547 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 newStream->max_buffers = channel->getNumBuffers();
2549 newStream->priv = channel;
2550 }
2551 break;
2552 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2553 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2554 mChannelHandle,
2555 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002556 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002557 this,
2558 newStream,
2559 (cam_stream_type_t)
2560 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2561 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2562 mMetadataChannel);
2563 if (channel == NULL) {
2564 LOGE("allocation of YUV channel failed");
2565 pthread_mutex_unlock(&mMutex);
2566 return -ENOMEM;
2567 }
2568 newStream->max_buffers = channel->getNumBuffers();
2569 newStream->priv = channel;
2570 break;
2571 }
2572 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2573 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002574 case HAL_PIXEL_FORMAT_RAW10: {
2575 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2576 (HAL_DATASPACE_DEPTH != newStream->data_space))
2577 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002578 mRawChannel = new QCamera3RawChannel(
2579 mCameraHandle->camera_handle, mChannelHandle,
2580 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002581 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002582 this, newStream,
2583 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002584 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002585 if (mRawChannel == NULL) {
2586 LOGE("allocation of raw channel failed");
2587 pthread_mutex_unlock(&mMutex);
2588 return -ENOMEM;
2589 }
2590 newStream->max_buffers = mRawChannel->getNumBuffers();
2591 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2592 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002594 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002595 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2596 mDepthChannel = new QCamera3DepthChannel(
2597 mCameraHandle->camera_handle, mChannelHandle,
2598 mCameraHandle->ops, NULL, NULL, &padding_info,
2599 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2600 mMetadataChannel);
2601 if (NULL == mDepthChannel) {
2602 LOGE("Allocation of depth channel failed");
2603 pthread_mutex_unlock(&mMutex);
2604 return NO_MEMORY;
2605 }
2606 newStream->priv = mDepthChannel;
2607 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2608 } else {
2609 // Max live snapshot inflight buffer is 1. This is to mitigate
2610 // frame drop issues for video snapshot. The more buffers being
2611 // allocated, the more frame drops there are.
2612 mPictureChannel = new QCamera3PicChannel(
2613 mCameraHandle->camera_handle, mChannelHandle,
2614 mCameraHandle->ops, captureResultCb,
2615 setBufferErrorStatus, &padding_info, this, newStream,
2616 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2617 m_bIs4KVideo, isZsl, mMetadataChannel,
2618 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2619 if (mPictureChannel == NULL) {
2620 LOGE("allocation of channel failed");
2621 pthread_mutex_unlock(&mMutex);
2622 return -ENOMEM;
2623 }
2624 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2625 newStream->max_buffers = mPictureChannel->getNumBuffers();
2626 mPictureChannel->overrideYuvSize(
2627 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2628 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002629 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002630 break;
2631
2632 default:
2633 LOGE("not a supported format 0x%x", newStream->format);
2634 break;
2635 }
2636 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2637 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2638 } else {
2639 LOGE("Error, Unknown stream type");
2640 pthread_mutex_unlock(&mMutex);
2641 return -EINVAL;
2642 }
2643
2644 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002645 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2646 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002647 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002648 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002649 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2650 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2651 }
2652 }
2653
2654 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2655 it != mStreamInfo.end(); it++) {
2656 if ((*it)->stream == newStream) {
2657 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2658 break;
2659 }
2660 }
2661 } else {
2662 // Channel already exists for this stream
2663 // Do nothing for now
2664 }
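            // Restore the default padding info before the next stream in the list is processed.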
2665 padding_info = gCamCapability[mCameraId]->padding_info;
2666
Emilian Peev7650c122017-01-19 08:24:33 -08002667 /* Do not add entries for the input and depth streams in the meta stream info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 * since there is no real stream associated with them
2669 */
Emilian Peev7650c122017-01-19 08:24:33 -08002670 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002671 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2672 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002673 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002674 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002675 }
2676
Binhao Lincdb362a2017-04-20 13:31:54 -07002677 // By default, preview stream TNR is disabled.
2678 // Enable TNR to the preview stream if all conditions below are satisfied:
2679 // 1. resolution <= 1080p.
2680 // 2. preview resolution == video resolution.
2681 // 3. video stream TNR is enabled.
2682 // 4. EIS2.0
2683 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2684 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2685 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2686 if (m_bTnrEnabled && m_bTnrVideo && (atoi(is_type_value) == IS_TYPE_EIS_2_0) &&
2687 video_stream->width <= 1920 && video_stream->height <= 1080 &&
2688 video_stream->width == preview_stream->width &&
2689 video_stream->height == preview_stream->height) {
2690 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] |=
2691 CAM_QCOM_FEATURE_CPP_TNR;
2692 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2693 mStreamConfigInfo.postprocess_mask[preview_stream_idx[i]] &=
2694 ~CAM_QCOM_FEATURE_CDS;
2695 }
2696 }
2697
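    // Only the vendor RAW-only operation mode leaves onlyRaw set; any other mode clears it
    // so that the analysis/support channels below are still considered.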
Thierry Strudel2896d122017-02-23 19:18:03 -08002698 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2699 onlyRaw = false;
2700 }
2701
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002702 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002703 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002704 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002705 cam_analysis_info_t analysisInfo;
2706 int32_t ret = NO_ERROR;
2707 ret = mCommon.getAnalysisInfo(
2708 FALSE,
2709 analysisFeatureMask,
2710 &analysisInfo);
2711 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002712 cam_color_filter_arrangement_t analysis_color_arrangement =
2713 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2714 CAM_FILTER_ARRANGEMENT_Y :
2715 gCamCapability[mCameraId]->color_arrangement);
2716 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2717 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002718 cam_dimension_t analysisDim;
2719 analysisDim = mCommon.getMatchingDimension(previewSize,
2720 analysisInfo.analysis_recommended_res);
2721
2722 mAnalysisChannel = new QCamera3SupportChannel(
2723 mCameraHandle->camera_handle,
2724 mChannelHandle,
2725 mCameraHandle->ops,
2726 &analysisInfo.analysis_padding_info,
2727 analysisFeatureMask,
2728 CAM_STREAM_TYPE_ANALYSIS,
2729 &analysisDim,
2730 (analysisInfo.analysis_format
2731 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2732 : CAM_FORMAT_YUV_420_NV21),
2733 analysisInfo.hw_analysis_supported,
2734 gCamCapability[mCameraId]->color_arrangement,
2735 this,
2736 0); // force buffer count to 0
2737 } else {
2738 LOGW("getAnalysisInfo failed, ret = %d", ret);
2739 }
2740 if (!mAnalysisChannel) {
2741 LOGW("Analysis channel cannot be created");
2742 }
2743 }
2744
Thierry Strudel3d639192016-09-09 11:52:26 -07002745 //RAW DUMP channel
2746 if (mEnableRawDump && isRawStreamRequested == false){
2747 cam_dimension_t rawDumpSize;
2748 rawDumpSize = getMaxRawSize(mCameraId);
2749 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2750 setPAAFSupport(rawDumpFeatureMask,
2751 CAM_STREAM_TYPE_RAW,
2752 gCamCapability[mCameraId]->color_arrangement);
2753 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2754 mChannelHandle,
2755 mCameraHandle->ops,
2756 rawDumpSize,
2757 &padding_info,
2758 this, rawDumpFeatureMask);
2759 if (!mRawDumpChannel) {
2760 LOGE("Raw Dump channel cannot be created");
2761 pthread_mutex_unlock(&mMutex);
2762 return -ENOMEM;
2763 }
2764 }
2765
Thierry Strudel3d639192016-09-09 11:52:26 -07002766 if (mAnalysisChannel) {
2767 cam_analysis_info_t analysisInfo;
2768 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2769 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2770 CAM_STREAM_TYPE_ANALYSIS;
2771 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2772 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002773 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002774 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2775 &analysisInfo);
2776 if (rc != NO_ERROR) {
2777 LOGE("getAnalysisInfo failed, ret = %d", rc);
2778 pthread_mutex_unlock(&mMutex);
2779 return rc;
2780 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002781 cam_color_filter_arrangement_t analysis_color_arrangement =
2782 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2783 CAM_FILTER_ARRANGEMENT_Y :
2784 gCamCapability[mCameraId]->color_arrangement);
2785 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2786 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2787 analysis_color_arrangement);
2788
Thierry Strudel3d639192016-09-09 11:52:26 -07002789 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002790 mCommon.getMatchingDimension(previewSize,
2791 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002792 mStreamConfigInfo.num_streams++;
2793 }
2794
Thierry Strudel2896d122017-02-23 19:18:03 -08002795 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002796 cam_analysis_info_t supportInfo;
2797 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2798 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2799 setPAAFSupport(callbackFeatureMask,
2800 CAM_STREAM_TYPE_CALLBACK,
2801 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002802 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002803 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002804 if (ret != NO_ERROR) {
2805 /* Ignore the error for Mono camera
2806 * because the PAAF bit mask is only set
2807 * for CAM_STREAM_TYPE_ANALYSIS stream type
2808 */
2809 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2810 LOGW("getAnalysisInfo failed, ret = %d", ret);
2811 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002812 }
2813 mSupportChannel = new QCamera3SupportChannel(
2814 mCameraHandle->camera_handle,
2815 mChannelHandle,
2816 mCameraHandle->ops,
2817 &gCamCapability[mCameraId]->padding_info,
2818 callbackFeatureMask,
2819 CAM_STREAM_TYPE_CALLBACK,
2820 &QCamera3SupportChannel::kDim,
2821 CAM_FORMAT_YUV_420_NV21,
2822 supportInfo.hw_analysis_supported,
2823 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002824 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002825 if (!mSupportChannel) {
2826 LOGE("dummy channel cannot be created");
2827 pthread_mutex_unlock(&mMutex);
2828 return -ENOMEM;
2829 }
2830 }
2831
2832 if (mSupportChannel) {
2833 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2834 QCamera3SupportChannel::kDim;
2835 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2836 CAM_STREAM_TYPE_CALLBACK;
2837 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2838 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2839 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2840 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2841 gCamCapability[mCameraId]->color_arrangement);
2842 mStreamConfigInfo.num_streams++;
2843 }
2844
2845 if (mRawDumpChannel) {
2846 cam_dimension_t rawSize;
2847 rawSize = getMaxRawSize(mCameraId);
2848 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2849 rawSize;
2850 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2851 CAM_STREAM_TYPE_RAW;
2852 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2853 CAM_QCOM_FEATURE_NONE;
2854 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2855 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2856 gCamCapability[mCameraId]->color_arrangement);
2857 mStreamConfigInfo.num_streams++;
2858 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002859
2860 if (mHdrPlusRawSrcChannel) {
2861 cam_dimension_t rawSize;
2862 rawSize = getMaxRawSize(mCameraId);
2863 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2864 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2865 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2866 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2867 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2868 gCamCapability[mCameraId]->color_arrangement);
2869 mStreamConfigInfo.num_streams++;
2870 }
2871
Thierry Strudel3d639192016-09-09 11:52:26 -07002872 /* In HFR mode, if video stream is not added, create a dummy channel so that
2873 * ISP can create a batch mode even for preview only case. This channel is
2874 * never 'start'ed (no stream-on), it is only 'initialized' */
2875 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2876 !m_bIsVideo) {
2877 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2878 setPAAFSupport(dummyFeatureMask,
2879 CAM_STREAM_TYPE_VIDEO,
2880 gCamCapability[mCameraId]->color_arrangement);
2881 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2882 mChannelHandle,
2883 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002884 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002885 this,
2886 &mDummyBatchStream,
2887 CAM_STREAM_TYPE_VIDEO,
2888 dummyFeatureMask,
2889 mMetadataChannel);
2890 if (NULL == mDummyBatchChannel) {
2891 LOGE("creation of mDummyBatchChannel failed."
2892 "Preview will use non-hfr sensor mode ");
2893 }
2894 }
2895 if (mDummyBatchChannel) {
2896 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2897 mDummyBatchStream.width;
2898 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2899 mDummyBatchStream.height;
2900 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2901 CAM_STREAM_TYPE_VIDEO;
2902 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2903 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2904 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2905 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2906 gCamCapability[mCameraId]->color_arrangement);
2907 mStreamConfigInfo.num_streams++;
2908 }
2909
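    // Overall buffer budget advertised to the backend: max_buffers is 0 when 4K video is
    // configured, MAX_VIDEO_BUFFERS when EIS 3.0 is enabled, and MAX_INFLIGHT_REQUESTS otherwise.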
2910 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2911 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002912 m_bIs4KVideo ? 0 :
2913 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002914
2915 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2916 for (pendingRequestIterator i = mPendingRequestsList.begin();
2917 i != mPendingRequestsList.end();) {
2918 i = erasePendingRequest(i);
2919 }
2920 mPendingFrameDropList.clear();
2921 // Initialize/Reset the pending buffers list
2922 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2923 req.mPendingBufferList.clear();
2924 }
2925 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2926
Thierry Strudel3d639192016-09-09 11:52:26 -07002927 mCurJpegMeta.clear();
2928 //Get min frame duration for this streams configuration
2929 deriveMinFrameDuration();
2930
Chien-Yu Chenee335912017-02-09 17:53:20 -08002931 mFirstPreviewIntentSeen = false;
2932
2933 // Disable HDR+ if it's enabled.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002934 {
2935 Mutex::Autolock l(gHdrPlusClientLock);
2936 disableHdrPlusModeLocked();
2937 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002938
Thierry Strudel3d639192016-09-09 11:52:26 -07002939 // Update state
2940 mState = CONFIGURED;
2941
Shuzhen Wang3c077d72017-04-20 22:48:59 -07002942 mFirstMetadataCallback = true;
2943
Thierry Strudel3d639192016-09-09 11:52:26 -07002944 pthread_mutex_unlock(&mMutex);
2945
2946 return rc;
2947}
2948
2949/*===========================================================================
2950 * FUNCTION : validateCaptureRequest
2951 *
2952 * DESCRIPTION: validate a capture request from camera service
2953 *
2954 * PARAMETERS :
2955 * @request : request from framework to process
2956 *
2957 * RETURN :
2958 *
2959 *==========================================================================*/
2960int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002961 camera3_capture_request_t *request,
2962 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002963{
2964 ssize_t idx = 0;
2965 const camera3_stream_buffer_t *b;
2966 CameraMetadata meta;
2967
2968 /* Sanity check the request */
2969 if (request == NULL) {
2970 LOGE("NULL capture request");
2971 return BAD_VALUE;
2972 }
2973
2974 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2975 /*settings cannot be null for the first request*/
2976 return BAD_VALUE;
2977 }
2978
2979 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002980 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2981 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002982 LOGE("%s: Request %d: No output buffers provided!",
2983 __FUNCTION__, frameNumber);
2984 return BAD_VALUE;
2985 }
2986 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2987 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2988 request->num_output_buffers, MAX_NUM_STREAMS);
2989 return BAD_VALUE;
2990 }
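    // If an input (reprocess) buffer is attached, run the same status, fence, and
    // handle checks on it before looking at the output buffers.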
2991 if (request->input_buffer != NULL) {
2992 b = request->input_buffer;
2993 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2994 LOGE("Request %d: Buffer %ld: Status not OK!",
2995 frameNumber, (long)idx);
2996 return BAD_VALUE;
2997 }
2998 if (b->release_fence != -1) {
2999 LOGE("Request %d: Buffer %ld: Has a release fence!",
3000 frameNumber, (long)idx);
3001 return BAD_VALUE;
3002 }
3003 if (b->buffer == NULL) {
3004 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3005 frameNumber, (long)idx);
3006 return BAD_VALUE;
3007 }
3008 }
3009
3010 // Validate all buffers
3011 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003012 if (b == NULL) {
3013 return BAD_VALUE;
3014 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003015 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003016 QCamera3ProcessingChannel *channel =
3017 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3018 if (channel == NULL) {
3019 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3020 frameNumber, (long)idx);
3021 return BAD_VALUE;
3022 }
3023 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3024 LOGE("Request %d: Buffer %ld: Status not OK!",
3025 frameNumber, (long)idx);
3026 return BAD_VALUE;
3027 }
3028 if (b->release_fence != -1) {
3029 LOGE("Request %d: Buffer %ld: Has a release fence!",
3030 frameNumber, (long)idx);
3031 return BAD_VALUE;
3032 }
3033 if (b->buffer == NULL) {
3034 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3035 frameNumber, (long)idx);
3036 return BAD_VALUE;
3037 }
3038 if (*(b->buffer) == NULL) {
3039 LOGE("Request %d: Buffer %ld: NULL private handle!",
3040 frameNumber, (long)idx);
3041 return BAD_VALUE;
3042 }
3043 idx++;
3044 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003045 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003046 return NO_ERROR;
3047}
3048
3049/*===========================================================================
3050 * FUNCTION : deriveMinFrameDuration
3051 *
3052 * DESCRIPTION: derive mininum processed, jpeg, and raw frame durations based
3053 * on currently configured streams.
3054 *
3055 * PARAMETERS : NONE
3056 *
3057 * RETURN : NONE
3058 *
3059 *==========================================================================*/
3060void QCamera3HardwareInterface::deriveMinFrameDuration()
3061{
3062 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3063
3064 maxJpegDim = 0;
3065 maxProcessedDim = 0;
3066 maxRawDim = 0;
3067
3068 // Figure out maximum jpeg, processed, and raw dimensions
3069 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3070 it != mStreamInfo.end(); it++) {
3071
3072 // Input stream doesn't have valid stream_type
3073 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3074 continue;
3075
3076 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3077 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3078 if (dimension > maxJpegDim)
3079 maxJpegDim = dimension;
3080 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3081 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3082 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3083 if (dimension > maxRawDim)
3084 maxRawDim = dimension;
3085 } else {
3086 if (dimension > maxProcessedDim)
3087 maxProcessedDim = dimension;
3088 }
3089 }
3090
3091 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3092 MAX_SIZES_CNT);
3093
3094 //Assume all jpeg dimensions are in processed dimensions.
3095 if (maxJpegDim > maxProcessedDim)
3096 maxProcessedDim = maxJpegDim;
3097 //Find the smallest raw dimension that is greater than or equal to the largest processed dimension
3098 if (maxProcessedDim > maxRawDim) {
3099 maxRawDim = INT32_MAX;
3100
3101 for (size_t i = 0; i < count; i++) {
3102 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3103 gCamCapability[mCameraId]->raw_dim[i].height;
3104 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3105 maxRawDim = dimension;
3106 }
3107 }
3108
3109 //Find minimum durations for processed, jpeg, and raw
3110 for (size_t i = 0; i < count; i++) {
3111 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3112 gCamCapability[mCameraId]->raw_dim[i].height) {
3113 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3114 break;
3115 }
3116 }
3117 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3118 for (size_t i = 0; i < count; i++) {
3119 if (maxProcessedDim ==
3120 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3121 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3122 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3123 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3124 break;
3125 }
3126 }
3127}
3128
3129/*===========================================================================
3130 * FUNCTION : getMinFrameDuration
3131 *
3132 * DESCRIPTION: get the minimum frame duration based on the currently derived
3133 * per-stream-type minimum frame durations and the current request configuration.
3134 *
3135 * PARAMETERS : @request: request sent by the framework
3136 *
3137 * RETURN : min frame duration for a particular request
3138 *
3139 *==========================================================================*/
3140int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3141{
3142 bool hasJpegStream = false;
3143 bool hasRawStream = false;
3144 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3145 const camera3_stream_t *stream = request->output_buffers[i].stream;
3146 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3147 hasJpegStream = true;
3148 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3149 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3150 stream->format == HAL_PIXEL_FORMAT_RAW16)
3151 hasRawStream = true;
3152 }
3153
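    // Without a JPEG stream the request is bounded by the raw/processed minimum durations;
    // a JPEG stream additionally imposes the JPEG minimum duration.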
3154 if (!hasJpegStream)
3155 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3156 else
3157 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3158}
3159
3160/*===========================================================================
3161 * FUNCTION : handleBuffersDuringFlushLock
3162 *
3163 * DESCRIPTION: Account for buffers returned from back-end during flush
3164 * This function is executed while mMutex is held by the caller.
3165 *
3166 * PARAMETERS :
3167 * @buffer: image buffer for the callback
3168 *
3169 * RETURN :
3170 *==========================================================================*/
3171void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3172{
3173 bool buffer_found = false;
3174 for (List<PendingBuffersInRequest>::iterator req =
3175 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3176 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3177 for (List<PendingBufferInfo>::iterator i =
3178 req->mPendingBufferList.begin();
3179 i != req->mPendingBufferList.end(); i++) {
3180 if (i->buffer == buffer->buffer) {
3181 mPendingBuffersMap.numPendingBufsAtFlush--;
3182 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3183 buffer->buffer, req->frame_number,
3184 mPendingBuffersMap.numPendingBufsAtFlush);
3185 buffer_found = true;
3186 break;
3187 }
3188 }
3189 if (buffer_found) {
3190 break;
3191 }
3192 }
3193 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3194 //signal the flush()
3195 LOGD("All buffers returned to HAL. Continue flush");
3196 pthread_cond_signal(&mBuffersCond);
3197 }
3198}
3199
Thierry Strudel3d639192016-09-09 11:52:26 -07003200/*===========================================================================
3201 * FUNCTION : handleBatchMetadata
3202 *
3203 * DESCRIPTION: Handles metadata buffer callback in batch mode
3204 *
3205 * PARAMETERS : @metadata_buf: metadata buffer
3206 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3207 * the meta buf in this method
3208 *
3209 * RETURN :
3210 *
3211 *==========================================================================*/
3212void QCamera3HardwareInterface::handleBatchMetadata(
3213 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3214{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003215 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003216
3217 if (NULL == metadata_buf) {
3218 LOGE("metadata_buf is NULL");
3219 return;
3220 }
3221 /* In batch mode, the metadata will contain the frame number and timestamp of
3222 * the last frame in the batch. Eg: a batch containing buffers from request
3223 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3224 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3225 * multiple process_capture_results */
3226 metadata_buffer_t *metadata =
3227 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3228 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3229 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3230 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3231 uint32_t frame_number = 0, urgent_frame_number = 0;
3232 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3233 bool invalid_metadata = false;
3234 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3235 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003236 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003237
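    // Pull the frame number, urgent frame number, and sensor timestamp fields out of
    // the batch metadata before validating them.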
3238 int32_t *p_frame_number_valid =
3239 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3240 uint32_t *p_frame_number =
3241 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3242 int64_t *p_capture_time =
3243 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3244 int32_t *p_urgent_frame_number_valid =
3245 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3246 uint32_t *p_urgent_frame_number =
3247 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3248
3249 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3250 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3251 (NULL == p_urgent_frame_number)) {
3252 LOGE("Invalid metadata");
3253 invalid_metadata = true;
3254 } else {
3255 frame_number_valid = *p_frame_number_valid;
3256 last_frame_number = *p_frame_number;
3257 last_frame_capture_time = *p_capture_time;
3258 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3259 last_urgent_frame_number = *p_urgent_frame_number;
3260 }
3261
3262 /* In batch mode, when no video buffers are requested, set_parms are sent
3263 * for every capture_request. The difference between consecutive urgent
3264 * frame numbers and frame numbers should be used to interpolate the
3265 * corresponding frame numbers and time stamps */
3266 pthread_mutex_lock(&mMutex);
3267 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003268 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3269 if(idx < 0) {
3270 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3271 last_urgent_frame_number);
3272 mState = ERROR;
3273 pthread_mutex_unlock(&mMutex);
3274 return;
3275 }
3276 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003277 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3278 first_urgent_frame_number;
3279
3280 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3281 urgent_frame_number_valid,
3282 first_urgent_frame_number, last_urgent_frame_number);
3283 }
3284
3285 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003286 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3287 if(idx < 0) {
3288 LOGE("Invalid frame number received: %d. Irrecoverable error",
3289 last_frame_number);
3290 mState = ERROR;
3291 pthread_mutex_unlock(&mMutex);
3292 return;
3293 }
3294 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003295 frameNumDiff = last_frame_number + 1 -
3296 first_frame_number;
3297 mPendingBatchMap.removeItem(last_frame_number);
3298
3299 LOGD("frm: valid: %d frm_num: %d - %d",
3300 frame_number_valid,
3301 first_frame_number, last_frame_number);
3302
3303 }
3304 pthread_mutex_unlock(&mMutex);
3305
3306 if (urgent_frame_number_valid || frame_number_valid) {
3307 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3308 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3309 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3310 urgentFrameNumDiff, last_urgent_frame_number);
3311 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3312 LOGE("frameNumDiff: %d frameNum: %d",
3313 frameNumDiff, last_frame_number);
3314 }
3315
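    // Expand the single batch metadata into per-frame results: frame numbers and
    // timestamps for the intermediate frames are inferred from those of the last
    // frame in the batch.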
3316 for (size_t i = 0; i < loopCount; i++) {
3317 /* handleMetadataWithLock is called even for invalid_metadata for
3318 * pipeline depth calculation */
3319 if (!invalid_metadata) {
3320 /* Infer frame number. Batch metadata contains frame number of the
3321 * last frame */
3322 if (urgent_frame_number_valid) {
3323 if (i < urgentFrameNumDiff) {
3324 urgent_frame_number =
3325 first_urgent_frame_number + i;
3326 LOGD("inferred urgent frame_number: %d",
3327 urgent_frame_number);
3328 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3329 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3330 } else {
3331 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3332 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3333 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3334 }
3335 }
3336
3337 /* Infer frame number. Batch metadata contains frame number of the
3338 * last frame */
3339 if (frame_number_valid) {
3340 if (i < frameNumDiff) {
3341 frame_number = first_frame_number + i;
3342 LOGD("inferred frame_number: %d", frame_number);
3343 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3344 CAM_INTF_META_FRAME_NUMBER, frame_number);
3345 } else {
3346 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3347 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3348 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3349 }
3350 }
3351
3352 if (last_frame_capture_time) {
3353 //Infer timestamp
3354 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003355 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003356 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003357 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003358 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3359 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3360 LOGD("batch capture_time: %lld, capture_time: %lld",
3361 last_frame_capture_time, capture_time);
3362 }
3363 }
3364 pthread_mutex_lock(&mMutex);
3365 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003366 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003367 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3368 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003369 &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003370 pthread_mutex_unlock(&mMutex);
3371 }
3372
3373 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003374 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003375 mMetadataChannel->bufDone(metadata_buf);
3376 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003377 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003378 }
3379}
3380
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003381void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3382 camera3_error_msg_code_t errorCode)
3383{
3384 camera3_notify_msg_t notify_msg;
3385 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3386 notify_msg.type = CAMERA3_MSG_ERROR;
3387 notify_msg.message.error.error_code = errorCode;
3388 notify_msg.message.error.error_stream = NULL;
3389 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003390 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003391
3392 return;
3393}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003394
3395/*===========================================================================
3396 * FUNCTION : sendPartialMetadataWithLock
3397 *
3398 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3399 *
3400 * PARAMETERS : @metadata: metadata buffer
3401 * @requestIter: The iterator for the pending capture request for
3402 * which the partial result is being sent
3403 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3404 * last urgent metadata in a batch. Always true for non-batch mode
3405 *
3406 * RETURN :
3407 *
3408 *==========================================================================*/
3409
3410void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3411 metadata_buffer_t *metadata,
3412 const pendingRequestIterator requestIter,
3413 bool lastUrgentMetadataInBatch)
3414{
3415 camera3_capture_result_t result;
3416 memset(&result, 0, sizeof(camera3_capture_result_t));
3417
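    // Each partial result delivered for this request bumps its partial result count,
    // which is reported to the framework in the capture result below.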
3418 requestIter->partial_result_cnt++;
3419
3420 // Extract 3A metadata
3421 result.result = translateCbUrgentMetadataToResultMetadata(
3422 metadata, lastUrgentMetadataInBatch);
3423 // Populate metadata result
3424 result.frame_number = requestIter->frame_number;
3425 result.num_output_buffers = 0;
3426 result.output_buffers = NULL;
3427 result.partial_result = requestIter->partial_result_cnt;
3428
3429 {
3430 Mutex::Autolock l(gHdrPlusClientLock);
3431 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3432 // Notify HDR+ client about the partial metadata.
3433 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3434 result.partial_result == PARTIAL_RESULT_COUNT);
3435 }
3436 }
3437
3438 orchestrateResult(&result);
3439 LOGD("urgent frame_number = %u", result.frame_number);
3440 free_camera_metadata((camera_metadata_t *)result.result);
3441}
3442
Thierry Strudel3d639192016-09-09 11:52:26 -07003443/*===========================================================================
3444 * FUNCTION : handleMetadataWithLock
3445 *
3446 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3447 *
3448 * PARAMETERS : @metadata_buf: metadata buffer
3449 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3450 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003451 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3452 * last urgent metadata in a batch. Always true for non-batch mode
3453 * @lastMetadataInBatch: Boolean to indicate whether this is the
3454 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003455 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3456 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003457 *
3458 * RETURN :
3459 *
3460 *==========================================================================*/
3461void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003462 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003463 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3464 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003465{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003466 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003467 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3468 //during flush do not send metadata from this thread
3469 LOGD("not sending metadata during flush or when mState is error");
3470 if (free_and_bufdone_meta_buf) {
3471 mMetadataChannel->bufDone(metadata_buf);
3472 free(metadata_buf);
3473 }
3474 return;
3475 }
3476
3477 //not in flush
3478 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3479 int32_t frame_number_valid, urgent_frame_number_valid;
3480 uint32_t frame_number, urgent_frame_number;
3481 int64_t capture_time;
3482 nsecs_t currentSysTime;
3483
3484 int32_t *p_frame_number_valid =
3485 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3486 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3487 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3488 int32_t *p_urgent_frame_number_valid =
3489 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3490 uint32_t *p_urgent_frame_number =
3491 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3492 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3493 metadata) {
3494 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3495 *p_frame_number_valid, *p_frame_number);
3496 }
3497
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003498 camera_metadata_t *resultMetadata = nullptr;
3499
Thierry Strudel3d639192016-09-09 11:52:26 -07003500 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3501 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3502 LOGE("Invalid metadata");
3503 if (free_and_bufdone_meta_buf) {
3504 mMetadataChannel->bufDone(metadata_buf);
3505 free(metadata_buf);
3506 }
3507 goto done_metadata;
3508 }
3509 frame_number_valid = *p_frame_number_valid;
3510 frame_number = *p_frame_number;
3511 capture_time = *p_capture_time;
3512 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3513 urgent_frame_number = *p_urgent_frame_number;
3514 currentSysTime = systemTime(CLOCK_MONOTONIC);
3515
3516 // Detect if buffers from any requests are overdue
3517 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003518 int64_t timeout;
3519 {
3520 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3521 // If there is a pending HDR+ request, the following requests may be blocked until the
3522 // HDR+ request is done. So allow a longer timeout.
3523 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3524 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3525 }
3526
3527 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003528 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003529 assert(missed.stream->priv);
3530 if (missed.stream->priv) {
3531 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3532 assert(ch->mStreams[0]);
3533 if (ch->mStreams[0]) {
3534 LOGE("Cancel missing frame = %d, buffer = %p,"
3535 "stream type = %d, stream format = %d",
3536 req.frame_number, missed.buffer,
3537 ch->mStreams[0]->getMyType(), missed.stream->format);
3538 ch->timeoutFrame(req.frame_number);
3539 }
3540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003541 }
3542 }
3543 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003544 //For the very first metadata callback, regardless of whether it contains a valid
3545 //frame number, send the partial metadata for the jumpstarting requests.
3546 //Note that this has to be done even if the metadata doesn't contain a valid
3547 //urgent frame number, because in the case where only one request is ever submitted
3548 //to the HAL, there won't be a subsequent valid urgent frame number.
3549 if (mFirstMetadataCallback) {
3550 for (pendingRequestIterator i =
3551 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3552 if (i->bUseFirstPartial) {
3553 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
3554 }
3555 }
3556 mFirstMetadataCallback = false;
3557 }
3558
Thierry Strudel3d639192016-09-09 11:52:26 -07003559 //Partial result on process_capture_result for timestamp
3560 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003561 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003562
3563 //Received an urgent frame number, handle it
3564 //using partial results
3565 for (pendingRequestIterator i =
3566 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3567 LOGD("Iterator Frame = %d urgent frame = %d",
3568 i->frame_number, urgent_frame_number);
3569
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00003570 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003571 (i->partial_result_cnt == 0)) {
3572 LOGE("Error: HAL missed urgent metadata for frame number %d",
3573 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003574 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003575 }
3576
3577 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003578 i->partial_result_cnt == 0) {
3579 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003580 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3581 // Instant AEC settled for this frame.
3582 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3583 mInstantAECSettledFrameNumber = urgent_frame_number;
3584 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003585 break;
3586 }
3587 }
3588 }
3589
3590 if (!frame_number_valid) {
3591 LOGD("Not a valid normal frame number, used as SOF only");
3592 if (free_and_bufdone_meta_buf) {
3593 mMetadataChannel->bufDone(metadata_buf);
3594 free(metadata_buf);
3595 }
3596 goto done_metadata;
3597 }
3598 LOGH("valid frame_number = %u, capture_time = %lld",
3599 frame_number, capture_time);
3600
Emilian Peev7650c122017-01-19 08:24:33 -08003601 if (metadata->is_depth_data_valid) {
3602 handleDepthDataLocked(metadata->depth_data, frame_number);
3603 }
3604
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003605 // Check whether any stream buffer corresponding to this frame was dropped.
3606 // If dropped, send ERROR_BUFFER for the corresponding stream.
3607 // Also, if instant AEC is enabled, drop frames until AEC has settled.
3608 for (auto & pendingRequest : mPendingRequestsList) {
3609 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3610 mInstantAECSettledFrameNumber)) {
3611 camera3_notify_msg_t notify_msg = {};
3612 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003613 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003614 QCamera3ProcessingChannel *channel =
3615 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003616 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003617 if (p_cam_frame_drop) {
3618 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003619 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003620 // Got the stream ID for drop frame.
3621 dropFrame = true;
3622 break;
3623 }
3624 }
3625 } else {
3626 // This is instant AEC case.
3627 // For instant AEC, drop the stream until AEC is settled.
3628 dropFrame = true;
3629 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003630
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003631 if (dropFrame) {
3632 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3633 if (p_cam_frame_drop) {
3634 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003635 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003636 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003637 } else {
3638 // For instant AEC, inform frame drop and frame number
3639 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3640 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003641 pendingRequest.frame_number, streamID,
3642 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003643 }
3644 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003645 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003646 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003647 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003648 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003649 if (p_cam_frame_drop) {
3650 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003651 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003652 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003653 } else {
3654 // For instant AEC, inform frame drop and frame number
3655 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3656 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003657 pendingRequest.frame_number, streamID,
3658 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003659 }
3660 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003661 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003662 PendingFrameDrop.stream_ID = streamID;
3663 // Add the Frame drop info to mPendingFrameDropList
3664 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003665 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003666 }
3667 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003668 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003670 for (auto & pendingRequest : mPendingRequestsList) {
3671 // Find the pending request with the frame number.
3672 if (pendingRequest.frame_number == frame_number) {
3673 // Update the sensor timestamp.
3674 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003675
Thierry Strudel3d639192016-09-09 11:52:26 -07003676
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003677 /* Set the timestamp in display metadata so that clients aware of
3678 private_handle such as VT can use this un-modified timestamps.
3679 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003681
Thierry Strudel3d639192016-09-09 11:52:26 -07003682 // Find channel requiring metadata, meaning internal offline postprocess
3683 // is needed.
3684 //TODO: for now, we don't support two streams requiring metadata at the same time.
3685 // (because we are not making copies, and metadata buffer is not reference counted.
3686 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003687 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3688 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003689 if (iter->need_metadata) {
3690 internalPproc = true;
3691 QCamera3ProcessingChannel *channel =
3692 (QCamera3ProcessingChannel *)iter->stream->priv;
3693 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003694 if(p_is_metabuf_queued != NULL) {
3695 *p_is_metabuf_queued = true;
3696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003697 break;
3698 }
3699 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003700 for (auto itr = pendingRequest.internalRequestList.begin();
3701 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003702 if (itr->need_metadata) {
3703 internalPproc = true;
3704 QCamera3ProcessingChannel *channel =
3705 (QCamera3ProcessingChannel *)itr->stream->priv;
3706 channel->queueReprocMetadata(metadata_buf);
3707 break;
3708 }
3709 }
3710
Thierry Strudel54dc9782017-02-15 12:12:10 -08003711 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003712
3713 bool *enableZsl = nullptr;
3714 if (gExposeEnableZslKey) {
3715 enableZsl = &pendingRequest.enableZsl;
3716 }
3717
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003718 resultMetadata = translateFromHalMetadata(metadata,
3719 pendingRequest.timestamp, pendingRequest.request_id,
3720 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3721 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003722 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003723 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003724 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003725 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003726 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003727 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003729 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003730
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003731 if (pendingRequest.blob_request) {
3732 //Dump tuning metadata if enabled and available
3733 char prop[PROPERTY_VALUE_MAX];
3734 memset(prop, 0, sizeof(prop));
3735 property_get("persist.camera.dumpmetadata", prop, "0");
3736 int32_t enabled = atoi(prop);
3737 if (enabled && metadata->is_tuning_params_valid) {
3738 dumpMetadataToFile(metadata->tuning_params,
3739 mMetaFrameCount,
3740 enabled,
3741 "Snapshot",
3742 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003743 }
3744 }
3745
3746 if (!internalPproc) {
3747 LOGD("couldn't find need_metadata for this metadata");
3748 // Return metadata buffer
3749 if (free_and_bufdone_meta_buf) {
3750 mMetadataChannel->bufDone(metadata_buf);
3751 free(metadata_buf);
3752 }
3753 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003754
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003755 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003756 }
3757 }
3758
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003759 // Try to send out shutter callbacks and capture results.
3760 handlePendingResultsWithLock(frame_number, resultMetadata);
3761 return;
3762
Thierry Strudel3d639192016-09-09 11:52:26 -07003763done_metadata:
3764 for (pendingRequestIterator i = mPendingRequestsList.begin();
3765 i != mPendingRequestsList.end() ;i++) {
3766 i->pipeline_depth++;
3767 }
3768 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3769 unblockRequestIfNecessary();
3770}
3771
3772/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003773 * FUNCTION : handleDepthDataLocked
3774 *
3775 * DESCRIPTION: Handles incoming depth data
3776 *
3777 * PARAMETERS : @depthData : Depth data
3778 * @frameNumber: Frame number of the incoming depth data
3779 *
3780 * RETURN :
3781 *
3782 *==========================================================================*/
3783void QCamera3HardwareInterface::handleDepthDataLocked(
3784 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3785 uint32_t currentFrameNumber;
3786 buffer_handle_t *depthBuffer;
3787
3788 if (nullptr == mDepthChannel) {
3789 LOGE("Depth channel not present!");
3790 return;
3791 }
3792
3793 camera3_stream_buffer_t resultBuffer =
3794 {.acquire_fence = -1,
3795 .release_fence = -1,
3796 .status = CAMERA3_BUFFER_STATUS_OK,
3797 .buffer = nullptr,
3798 .stream = mDepthChannel->getStream()};
3799 camera3_capture_result_t result =
3800 {.result = nullptr,
3801 .num_output_buffers = 1,
3802 .output_buffers = &resultBuffer,
3803 .partial_result = 0,
3804 .frame_number = 0};
3805
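    // Walk the queued depth buffers in order: buffers older than the incoming frame are
    // returned as errors, the buffer matching this frame gets the depth data populated,
    // and newer buffers are left for a later callback.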
3806 do {
3807 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3808 if (nullptr == depthBuffer) {
3809 break;
3810 }
3811
3812 result.frame_number = currentFrameNumber;
3813 resultBuffer.buffer = depthBuffer;
3814 if (currentFrameNumber == frameNumber) {
3815 int32_t rc = mDepthChannel->populateDepthData(depthData,
3816 frameNumber);
3817 if (NO_ERROR != rc) {
3818 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3819 } else {
3820 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3821 }
3822 } else if (currentFrameNumber > frameNumber) {
3823 break;
3824 } else {
3825 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3826 {{currentFrameNumber, mDepthChannel->getStream(),
3827 CAMERA3_MSG_ERROR_BUFFER}}};
3828 orchestrateNotify(&notify_msg);
3829
3830 LOGE("Depth buffer for frame number: %d is missing "
3831 "returning back!", currentFrameNumber);
3832 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3833 }
3834 mDepthChannel->unmapBuffer(currentFrameNumber);
3835
3836 orchestrateResult(&result);
3837 } while (currentFrameNumber < frameNumber);
3838}
3839
3840/*===========================================================================
3841 * FUNCTION : notifyErrorFoPendingDepthData
3842 *
3843 * DESCRIPTION: Returns error for any pending depth buffers
3844 *
3845 * PARAMETERS : depthCh - depth channel that needs to get flushed
3846 *
3847 * RETURN :
3848 *
3849 *==========================================================================*/
3850void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3851 QCamera3DepthChannel *depthCh) {
3852 uint32_t currentFrameNumber;
3853 buffer_handle_t *depthBuffer;
3854
3855 if (nullptr == depthCh) {
3856 return;
3857 }
3858
3859 camera3_notify_msg_t notify_msg =
3860 {.type = CAMERA3_MSG_ERROR,
3861 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3862 camera3_stream_buffer_t resultBuffer =
3863 {.acquire_fence = -1,
3864 .release_fence = -1,
3865 .buffer = nullptr,
3866 .stream = depthCh->getStream(),
3867 .status = CAMERA3_BUFFER_STATUS_ERROR};
3868 camera3_capture_result_t result =
3869 {.result = nullptr,
3870 .frame_number = 0,
3871 .num_output_buffers = 1,
3872 .partial_result = 0,
3873 .output_buffers = &resultBuffer};
3874
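    // Drain every depth buffer still queued in the channel, reporting each one back to
    // the framework as a buffer error.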
3875 while (nullptr !=
3876 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3877 depthCh->unmapBuffer(currentFrameNumber);
3878
3879 notify_msg.message.error.frame_number = currentFrameNumber;
3880 orchestrateNotify(&notify_msg);
3881
3882 resultBuffer.buffer = depthBuffer;
3883 result.frame_number = currentFrameNumber;
3884 orchestrateResult(&result);
3885 };
3886}
3887
3888/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003889 * FUNCTION : hdrPlusPerfLock
3890 *
3891 * DESCRIPTION: perf lock for HDR+ using custom intent
3892 *
3893 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3894 *
3895 * RETURN : None
3896 *
3897 *==========================================================================*/
3898void QCamera3HardwareInterface::hdrPlusPerfLock(
3899 mm_camera_super_buf_t *metadata_buf)
3900{
3901 if (NULL == metadata_buf) {
3902 LOGE("metadata_buf is NULL");
3903 return;
3904 }
3905 metadata_buffer_t *metadata =
3906 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3907 int32_t *p_frame_number_valid =
3908 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3909 uint32_t *p_frame_number =
3910 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3911
3912 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3913 LOGE("%s: Invalid metadata", __func__);
3914 return;
3915 }
3916
3917 //acquire perf lock for 5 sec after the last HDR frame is captured
3918 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3919 if ((p_frame_number != NULL) &&
3920 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003921 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003922 }
3923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003924}
3925
3926/*===========================================================================
3927 * FUNCTION : handleInputBufferWithLock
3928 *
3929 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3930 *
3931 * PARAMETERS : @frame_number: frame number of the input buffer
3932 *
3933 * RETURN :
3934 *
3935 *==========================================================================*/
3936void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3937{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003938 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003939 pendingRequestIterator i = mPendingRequestsList.begin();
3940 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3941 i++;
3942 }
3943 if (i != mPendingRequestsList.end() && i->input_buffer) {
3944 //found the right request
3945 if (!i->shutter_notified) {
3946 CameraMetadata settings;
3947 camera3_notify_msg_t notify_msg;
3948 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3949 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3950 if(i->settings) {
3951 settings = i->settings;
3952 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3953 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3954 } else {
3955 LOGE("No timestamp in input settings! Using current one.");
3956 }
3957 } else {
3958 LOGE("Input settings missing!");
3959 }
3960
3961 notify_msg.type = CAMERA3_MSG_SHUTTER;
3962 notify_msg.message.shutter.frame_number = frame_number;
3963 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003964 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003965 i->shutter_notified = true;
3966 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3967 i->frame_number, notify_msg.message.shutter.timestamp);
3968 }
3969
3970 if (i->input_buffer->release_fence != -1) {
3971 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3972 close(i->input_buffer->release_fence);
3973 if (rc != OK) {
3974 LOGE("input buffer sync wait failed %d", rc);
3975 }
3976 }
3977
3978 camera3_capture_result result;
3979 memset(&result, 0, sizeof(camera3_capture_result));
3980 result.frame_number = frame_number;
3981 result.result = i->settings;
3982 result.input_buffer = i->input_buffer;
3983 result.partial_result = PARTIAL_RESULT_COUNT;
3984
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003985 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003986 LOGD("Input request metadata and input buffer frame_number = %u",
3987 i->frame_number);
3988 i = erasePendingRequest(i);
3989 } else {
3990 LOGE("Could not find input request for frame number %d", frame_number);
3991 }
3992}
3993
3994/*===========================================================================
3995 * FUNCTION : handleBufferWithLock
3996 *
3997 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3998 *
3999 * PARAMETERS : @buffer: image buffer for the callback
4000 * @frame_number: frame number of the image buffer
4001 *
4002 * RETURN :
4003 *
4004 *==========================================================================*/
4005void QCamera3HardwareInterface::handleBufferWithLock(
4006 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4007{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004008 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004009
4010 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4011 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4012 }
4013
Thierry Strudel3d639192016-09-09 11:52:26 -07004014 /* Nothing to be done during error state */
4015 if ((ERROR == mState) || (DEINIT == mState)) {
4016 return;
4017 }
4018 if (mFlushPerf) {
4019 handleBuffersDuringFlushLock(buffer);
4020 return;
4021 }
4022 //not in flush
4023 // If the frame number doesn't exist in the pending request list,
4024 // directly send the buffer to the frameworks, and update pending buffers map
4025 // Otherwise, book-keep the buffer.
4026 pendingRequestIterator i = mPendingRequestsList.begin();
4027 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4028 i++;
4029 }
4030 if (i == mPendingRequestsList.end()) {
4031 // Sanity check: all remaining pending live requests should have greater frame numbers
4032 for (pendingRequestIterator j = mPendingRequestsList.begin();
4033 j != mPendingRequestsList.end(); j++) {
4034 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
4035 LOGW("Error: pending live frame number %d is smaller than %d",
4036 j->frame_number, frame_number);
4037 }
4038 }
4039 camera3_capture_result_t result;
4040 memset(&result, 0, sizeof(camera3_capture_result_t));
4041 result.result = NULL;
4042 result.frame_number = frame_number;
4043 result.num_output_buffers = 1;
4044 result.partial_result = 0;
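        // If the backend flagged this frame/stream pair as dropped, mark the
        // buffer with an error status before it is returned to the framework.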
4045 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4046 m != mPendingFrameDropList.end(); m++) {
4047 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4048 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4049 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4050 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4051 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4052 frame_number, streamID);
4053 m = mPendingFrameDropList.erase(m);
4054 break;
4055 }
4056 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004057 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07004058 result.output_buffers = buffer;
4059 LOGH("result frame_number = %d, buffer = %p",
4060 frame_number, buffer->buffer);
4061
4062 mPendingBuffersMap.removeBuf(buffer->buffer);
4063
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004064 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004065 } else {
4066 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if (i->input_buffer->release_fence != -1) {
4068 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
4069 close(i->input_buffer->release_fence);
4070 if (rc != OK) {
4071 LOGE("input buffer sync wait failed %d", rc);
4072 }
4073 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004074 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004075
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004076 // Put buffer into the pending request
4077 for (auto &requestedBuffer : i->buffers) {
4078 if (requestedBuffer.stream == buffer->stream) {
4079 if (requestedBuffer.buffer != nullptr) {
4080 LOGE("Error: buffer is already set");
4081 } else {
4082 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
4083 sizeof(camera3_stream_buffer_t));
4084 *(requestedBuffer.buffer) = *buffer;
4085 LOGH("cache buffer %p at result frame_number %u",
4086 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 }
4088 }
4089 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004090
4091 if (i->input_buffer) {
4092 // For a reprocessing request, try to send out shutter callback and result metadata.
4093 handlePendingResultsWithLock(frame_number, nullptr);
4094 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004095 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004096
4097 if (mPreviewStarted == false) {
4098 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4099 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004100 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4101
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004102 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4103 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4104 mPreviewStarted = true;
4105
4106 // Set power hint for preview
4107 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4108 }
4109 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004110}
4111
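/*===========================================================================
 * FUNCTION : handlePendingResultsWithLock
 *
 * DESCRIPTION: Records the result metadata for a pending request and sends out
 * shutter callbacks and capture results for all pending requests that are
 * ready, preserving frame number order. For a live request, earlier live
 * requests that still lack result metadata are completed with an error
 * result. Must be called with mMutex held.
 *
 * PARAMETERS : @frameNumber : frame number the result metadata belongs to
 * @resultMetadata: result metadata to attach to the pending request
 *
 * RETURN :
 *
 *==========================================================================*/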
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004112void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4113 const camera_metadata_t *resultMetadata)
4114{
4115 // Find the pending request for this result metadata.
4116 auto requestIter = mPendingRequestsList.begin();
4117 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4118 requestIter++;
4119 }
4120
4121 if (requestIter == mPendingRequestsList.end()) {
4122 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4123 return;
4124 }
4125
4126 // Update the result metadata
4127 requestIter->resultMetadata = resultMetadata;
4128
4129 // Check what type of request this is.
4130 bool liveRequest = false;
4131 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004132 // HDR+ request doesn't have partial results.
4133 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004134 } else if (requestIter->input_buffer != nullptr) {
4135 // Reprocessing request result is the same as settings.
4136 requestIter->resultMetadata = requestIter->settings;
4137 // Reprocessing request doesn't have partial results.
4138 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4139 } else {
4140 liveRequest = true;
4141 requestIter->partial_result_cnt++;
4142 mPendingLiveRequest--;
4143
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004144 {
4145 Mutex::Autolock l(gHdrPlusClientLock);
4146 // For a live request, send the metadata to HDR+ client.
4147 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4148 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4149 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4150 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004151 }
4152 }
4153
4154 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4155 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
4156 bool readyToSend = true;
4157
4158 // Iterate through the pending requests to send out shutter callbacks and results that are
4159 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4160 // live requests that don't have result metadata yet.
4161 auto iter = mPendingRequestsList.begin();
4162 while (iter != mPendingRequestsList.end()) {
4163 // Check if current pending request is ready. If it's not ready, the following pending
4164 // requests are also not ready.
4165 if (readyToSend && iter->resultMetadata == nullptr) {
4166 readyToSend = false;
4167 }
4168
4169 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4170
4171 std::vector<camera3_stream_buffer_t> outputBuffers;
4172
4173 camera3_capture_result_t result = {};
4174 result.frame_number = iter->frame_number;
4175 result.result = iter->resultMetadata;
4176 result.partial_result = iter->partial_result_cnt;
4177
4178 // If this pending buffer has result metadata, we may be able to send out shutter callback
4179 // and result metadata.
4180 if (iter->resultMetadata != nullptr) {
4181 if (!readyToSend) {
4182 // If any of the previous pending request is not ready, this pending request is
4183 // also not ready to send in order to keep shutter callbacks and result metadata
4184 // in order.
4185 iter++;
4186 continue;
4187 }
4188
4189 // Invoke shutter callback if not yet.
4190 if (!iter->shutter_notified) {
4191 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4192
4193 // Find the timestamp in HDR+ result metadata
4194 camera_metadata_ro_entry_t entry;
4195 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4196 ANDROID_SENSOR_TIMESTAMP, &entry);
4197 if (res != OK) {
4198 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4199 __FUNCTION__, iter->frame_number, strerror(-res), res);
4200 } else {
4201 timestamp = entry.data.i64[0];
4202 }
4203
4204 camera3_notify_msg_t notify_msg = {};
4205 notify_msg.type = CAMERA3_MSG_SHUTTER;
4206 notify_msg.message.shutter.frame_number = iter->frame_number;
4207 notify_msg.message.shutter.timestamp = timestamp;
4208 orchestrateNotify(&notify_msg);
4209 iter->shutter_notified = true;
4210 }
4211
4212 result.input_buffer = iter->input_buffer;
4213
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004214 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4215 // If the result metadata belongs to a live request, notify errors for previous pending
4216 // live requests.
4217 mPendingLiveRequest--;
4218
4219 CameraMetadata dummyMetadata;
4220 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4221 result.result = dummyMetadata.release();
4222
4223 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004224
4225 // partial_result should be PARTIAL_RESULT_CNT in case of
4226 // ERROR_RESULT.
4227 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4228 result.partial_result = PARTIAL_RESULT_COUNT;
4229
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004230 } else {
4231 iter++;
4232 continue;
4233 }
4234
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004235 // Prepare output buffer array
4236 for (auto bufferInfoIter = iter->buffers.begin();
4237 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4238 if (bufferInfoIter->buffer != nullptr) {
4239
4240 QCamera3Channel *channel =
4241 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4242 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4243
4244 // Check if this buffer is a dropped frame.
4245 auto frameDropIter = mPendingFrameDropList.begin();
4246 while (frameDropIter != mPendingFrameDropList.end()) {
4247 if((frameDropIter->stream_ID == streamID) &&
4248 (frameDropIter->frame_number == frameNumber)) {
4249 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4250 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4251 streamID);
4252 mPendingFrameDropList.erase(frameDropIter);
4253 break;
4254 } else {
4255 frameDropIter++;
4256 }
4257 }
4258
4259 // Check buffer error status
4260 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4261 bufferInfoIter->buffer->buffer);
4262 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4263
4264 outputBuffers.push_back(*(bufferInfoIter->buffer));
4265 free(bufferInfoIter->buffer);
4266 bufferInfoIter->buffer = NULL;
4267 }
4268 }
4269
4270 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4271 result.num_output_buffers = outputBuffers.size();
4272
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004273 orchestrateResult(&result);
4274
4275 // For reprocessing, result metadata is the same as settings so do not free it here to
4276 // avoid double free.
4277 if (result.result != iter->settings) {
4278 free_camera_metadata((camera_metadata_t *)result.result);
4279 }
4280 iter->resultMetadata = nullptr;
4281 iter = erasePendingRequest(iter);
4282 }
4283
4284 if (liveRequest) {
4285 for (auto &iter : mPendingRequestsList) {
4286 // Increment pipeline depth for the following pending requests.
4287 if (iter.frame_number > frameNumber) {
4288 iter.pipeline_depth++;
4289 }
4290 }
4291 }
4292
4293 unblockRequestIfNecessary();
4294}
4295
Thierry Strudel3d639192016-09-09 11:52:26 -07004296/*===========================================================================
4297 * FUNCTION : unblockRequestIfNecessary
4298 *
4299 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4300 * that mMutex is held when this function is called.
4301 *
4302 * PARAMETERS :
4303 *
4304 * RETURN :
4305 *
4306 *==========================================================================*/
4307void QCamera3HardwareInterface::unblockRequestIfNecessary()
4308{
4309 // Unblock process_capture_request
4310 pthread_cond_signal(&mRequestCond);
4311}
4312
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004313/*===========================================================================
4314 * FUNCTION : isHdrSnapshotRequest
4315 *
4316 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4317 *
4318 * PARAMETERS : camera3 request structure
4319 *
4320 * RETURN : true if the request is an HDR snapshot request, false otherwise
4321 *
4322 *==========================================================================*/
4323bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4324{
4325 if (request == NULL) {
4326 LOGE("Invalid request handle");
4327 assert(0);
4328 return false;
4329 }
4330
4331 if (!mForceHdrSnapshot) {
4332 CameraMetadata frame_settings;
4333 frame_settings = request->settings;
4334
4335 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4336 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4337 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4338 return false;
4339 }
4340 } else {
4341 return false;
4342 }
4343
4344 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4345 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4346 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4347 return false;
4348 }
4349 } else {
4350 return false;
4351 }
4352 }
4353
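    // Even when the scene mode checks pass (or HDR snapshot is forced), treat
    // the request as an HDR snapshot only if it contains a BLOB (JPEG) output.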
4354 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4355 if (request->output_buffers[i].stream->format
4356 == HAL_PIXEL_FORMAT_BLOB) {
4357 return true;
4358 }
4359 }
4360
4361 return false;
4362}
4363/*===========================================================================
4364 * FUNCTION : orchestrateRequest
4365 *
4366 * DESCRIPTION: Orchestrates a capture request from camera service
4367 *
4368 * PARAMETERS :
4369 * @request : request from framework to process
4370 *
4371 * RETURN : Error status codes
4372 *
4373 *==========================================================================*/
4374int32_t QCamera3HardwareInterface::orchestrateRequest(
4375 camera3_capture_request_t *request)
4376{
4377
4378 uint32_t originalFrameNumber = request->frame_number;
4379 uint32_t originalOutputCount = request->num_output_buffers;
4380 const camera_metadata_t *original_settings = request->settings;
4381 List<InternalRequest> internallyRequestedStreams;
4382 List<InternalRequest> emptyInternalList;
4383
4384 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4385 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4386 uint32_t internalFrameNumber;
4387 CameraMetadata modified_meta;
4388
4389
4390 /* Add Blob channel to list of internally requested streams */
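        // The HDR snapshot is orchestrated as a bracketed sequence: for each
        // exposure compensation step an internal metering-only request is sent
        // first so AE can settle, followed by a full capture. Internal requests
        // use generated frame numbers that map to EMPTY_FRAMEWORK_FRAME_NUMBER
        // and are dropped in orchestrateResult()/orchestrateNotify(); only the
        // capture tagged with the original framework frame number is returned
        // to the framework.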
4391 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4392 if (request->output_buffers[i].stream->format
4393 == HAL_PIXEL_FORMAT_BLOB) {
4394 InternalRequest streamRequested;
4395 streamRequested.meteringOnly = 1;
4396 streamRequested.need_metadata = 0;
4397 streamRequested.stream = request->output_buffers[i].stream;
4398 internallyRequestedStreams.push_back(streamRequested);
4399 }
4400 }
4401 request->num_output_buffers = 0;
4402 auto itr = internallyRequestedStreams.begin();
4403
4404 /* Modify setting to set compensation */
4405 modified_meta = request->settings;
4406 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4407 uint8_t aeLock = 1;
4408 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4409 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4410 camera_metadata_t *modified_settings = modified_meta.release();
4411 request->settings = modified_settings;
4412
4413 /* Capture Settling & -2x frame */
4414 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4415 request->frame_number = internalFrameNumber;
4416 processCaptureRequest(request, internallyRequestedStreams);
4417
4418 request->num_output_buffers = originalOutputCount;
4419 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4420 request->frame_number = internalFrameNumber;
4421 processCaptureRequest(request, emptyInternalList);
4422 request->num_output_buffers = 0;
4423
4424 modified_meta = modified_settings;
4425 expCompensation = 0;
4426 aeLock = 1;
4427 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4428 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4429 modified_settings = modified_meta.release();
4430 request->settings = modified_settings;
4431
4432 /* Capture Settling & 0X frame */
4433
4434 itr = internallyRequestedStreams.begin();
4435 if (itr == internallyRequestedStreams.end()) {
4436 LOGE("Error Internally Requested Stream list is empty");
4437 assert(0);
4438 } else {
4439 itr->need_metadata = 0;
4440 itr->meteringOnly = 1;
4441 }
4442
4443 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4444 request->frame_number = internalFrameNumber;
4445 processCaptureRequest(request, internallyRequestedStreams);
4446
4447 itr = internallyRequestedStreams.begin();
4448 if (itr == internallyRequestedStreams.end()) {
4449 ALOGE("Error Internally Requested Stream list is empty");
4450 assert(0);
4451 } else {
4452 itr->need_metadata = 1;
4453 itr->meteringOnly = 0;
4454 }
4455
4456 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4457 request->frame_number = internalFrameNumber;
4458 processCaptureRequest(request, internallyRequestedStreams);
4459
4460 /* Capture 2X frame */
4461 modified_meta = modified_settings;
4462 expCompensation = GB_HDR_2X_STEP_EV;
4463 aeLock = 1;
4464 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4465 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4466 modified_settings = modified_meta.release();
4467 request->settings = modified_settings;
4468
4469 itr = internallyRequestedStreams.begin();
4470 if (itr == internallyRequestedStreams.end()) {
4471 ALOGE("Error Internally Requested Stream list is empty");
4472 assert(0);
4473 } else {
4474 itr->need_metadata = 0;
4475 itr->meteringOnly = 1;
4476 }
4477 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4478 request->frame_number = internalFrameNumber;
4479 processCaptureRequest(request, internallyRequestedStreams);
4480
4481 itr = internallyRequestedStreams.begin();
4482 if (itr == internallyRequestedStreams.end()) {
4483 ALOGE("Error Internally Requested Stream list is empty");
4484 assert(0);
4485 } else {
4486 itr->need_metadata = 1;
4487 itr->meteringOnly = 0;
4488 }
4489
4490 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4491 request->frame_number = internalFrameNumber;
4492 processCaptureRequest(request, internallyRequestedStreams);
4493
4494
4495 /* Capture 2X on original streaming config */
4496 internallyRequestedStreams.clear();
4497
4498 /* Restore original settings pointer */
4499 request->settings = original_settings;
4500 } else {
4501 uint32_t internalFrameNumber;
4502 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4503 request->frame_number = internalFrameNumber;
4504 return processCaptureRequest(request, internallyRequestedStreams);
4505 }
4506
4507 return NO_ERROR;
4508}
4509
4510/*===========================================================================
4511 * FUNCTION : orchestrateResult
4512 *
4513 * DESCRIPTION: Orchestrates a capture result to camera service
4514 *
4515 * PARAMETERS :
4516 * @result : capture result to be sent back to the framework
4517 *
4518 * RETURN :
4519 *
4520 *==========================================================================*/
4521void QCamera3HardwareInterface::orchestrateResult(
4522 camera3_capture_result_t *result)
4523{
4524 uint32_t frameworkFrameNumber;
4525 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4526 frameworkFrameNumber);
4527 if (rc != NO_ERROR) {
4528 LOGE("Cannot find translated frameworkFrameNumber");
4529 assert(0);
4530 } else {
4531 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004532 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004533 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004534 if (result->result != NULL) {
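            // Rewrite ANDROID_SYNC_FRAME_NUMBER in the result metadata so it
            // refers to the framework-visible frame number rather than the
            // internal one.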
Binhao Lin299ffc92017-04-27 11:22:47 -07004535 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4536 camera_metadata_entry_t entry;
4537 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4538 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004539 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004540 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4541 if (ret != OK)
4542 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004543 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004544 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004545 result->frame_number = frameworkFrameNumber;
4546 mCallbackOps->process_capture_result(mCallbackOps, result);
4547 }
4548 }
4549}
4550
4551/*===========================================================================
4552 * FUNCTION : orchestrateNotify
4553 *
4554 * DESCRIPTION: Orchestrates a notify to camera service
4555 *
4556 * PARAMETERS :
4557 * @notify_msg : notify message to be sent back to the framework
4558 *
4559 * RETURN :
4560 *
4561 *==========================================================================*/
4562void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4563{
4564 uint32_t frameworkFrameNumber;
4565 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004566 int32_t rc = NO_ERROR;
4567
4568 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004569 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004570
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004571 if (rc != NO_ERROR) {
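        // A device-level error is not tied to any tracked request, so forward
        // it with frame number 0 instead of failing the lookup.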
Thierry Strudel2896d122017-02-23 19:18:03 -08004572 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4573 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4574 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004575 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004576 LOGE("Cannot find translated frameworkFrameNumber");
4577 assert(0);
4578 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004579 }
4580 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004581
4582 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4583 LOGD("Internal Request drop the notifyCb");
4584 } else {
4585 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4586 mCallbackOps->notify(mCallbackOps, notify_msg);
4587 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004588}
4589
4590/*===========================================================================
4591 * FUNCTION : FrameNumberRegistry
4592 *
4593 * DESCRIPTION: Constructor
4594 *
4595 * PARAMETERS :
4596 *
4597 * RETURN :
4598 *
4599 *==========================================================================*/
4600FrameNumberRegistry::FrameNumberRegistry()
4601{
4602 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4603}
4604
4605/*===========================================================================
4606 * FUNCTION : ~FrameNumberRegistry
4607 *
4608 * DESCRIPTION: Destructor
4609 *
4610 * PARAMETERS :
4611 *
4612 * RETURN :
4613 *
4614 *==========================================================================*/
4615FrameNumberRegistry::~FrameNumberRegistry()
4616{
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : purgeOldEntriesLocked
4621 *
4622 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4623 *
4624 * PARAMETERS :
4625 *
4626 * RETURN : NONE
4627 *
4628 *==========================================================================*/
4629void FrameNumberRegistry::purgeOldEntriesLocked()
4630{
4631 while (_register.begin() != _register.end()) {
4632 auto itr = _register.begin();
4633 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4634 _register.erase(itr);
4635 } else {
4636 return;
4637 }
4638 }
4639}
4640
4641/*===========================================================================
4642 * FUNCTION : allocStoreInternalFrameNumber
4643 *
4644 * DESCRIPTION: Method to record a framework frame number and associate a
4645 * newly generated internal frame number with it
4646 *
4647 * PARAMETERS :
4648 * @frameworkFrameNumber: Frame number assigned by the framework
4649 * @internalFrameNumber : Output parameter holding the newly generated
4650 * internal frame number
4651 *
4652 * RETURN : Error code
4653 *
4654 *==========================================================================*/
4655int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4656 uint32_t &internalFrameNumber)
4657{
4658 Mutex::Autolock lock(mRegistryLock);
4659 internalFrameNumber = _nextFreeInternalNumber++;
4660 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4661 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4662 purgeOldEntriesLocked();
4663 return NO_ERROR;
4664}
4665
4666/*===========================================================================
4667 * FUNCTION : generateStoreInternalFrameNumber
4668 *
4669 * DESCRIPTION: Method to generate a new internal frame number that is not
4670 * associated with any framework request
4671 *
4672 * PARAMETERS :
4673 * @internalFrameNumber: Output parameter holding the newly generated
4674 * internal frame number
4675 *
4676 * RETURN : Error code
4677 *
4678 *==========================================================================*/
4679int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4680{
4681 Mutex::Autolock lock(mRegistryLock);
4682 internalFrameNumber = _nextFreeInternalNumber++;
4683 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4684 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4685 purgeOldEntriesLocked();
4686 return NO_ERROR;
4687}
4688
4689/*===========================================================================
4690 * FUNCTION : getFrameworkFrameNumber
4691 *
4692 * DESCRIPTION: Method to query the framework frame number given an internal frame number
4693 *
4694 * PARAMETERS :
4695 * @internalFrameNumber: Internal frame number to look up
4696 * @frameworkFrameNumber: Output parameter holding the framework frame number
4697 *
4698 * RETURN : Error code
4699 *
4700 *==========================================================================*/
4701int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4702 uint32_t &frameworkFrameNumber)
4703{
4704 Mutex::Autolock lock(mRegistryLock);
4705 auto itr = _register.find(internalFrameNumber);
4706 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004707 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004708 return -ENOENT;
4709 }
4710
4711 frameworkFrameNumber = itr->second;
4712 purgeOldEntriesLocked();
4713 return NO_ERROR;
4714}
Thierry Strudel3d639192016-09-09 11:52:26 -07004715
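/*===========================================================================
 * FUNCTION : fillPbStreamConfig
 *
 * DESCRIPTION: Fills a pbcamera (HDR+) stream configuration from a stream in
 * the given channel: image dimensions, format, per-plane stride and scanline,
 * and the padding left in the frame buffer after all planes.
 *
 * PARAMETERS : @config : pbcamera stream configuration to fill
 * @pbStreamId : stream ID to assign in the configuration
 * @pbStreamFormat: pbcamera pixel format to assign
 * @channel : channel that owns the stream
 * @streamIndex : index of the stream within the channel
 *
 * RETURN : OK on success, error code otherwise
 *
 *==========================================================================*/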
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004716status_t QCamera3HardwareInterface::fillPbStreamConfig(
4717 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4718 QCamera3Channel *channel, uint32_t streamIndex) {
4719 if (config == nullptr) {
4720 LOGE("%s: config is null", __FUNCTION__);
4721 return BAD_VALUE;
4722 }
4723
4724 if (channel == nullptr) {
4725 LOGE("%s: channel is null", __FUNCTION__);
4726 return BAD_VALUE;
4727 }
4728
4729 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4730 if (stream == nullptr) {
4731 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4732 return NAME_NOT_FOUND;
4733 }
4734
4735 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4736 if (streamInfo == nullptr) {
4737 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4738 return NAME_NOT_FOUND;
4739 }
4740
4741 config->id = pbStreamId;
4742 config->image.width = streamInfo->dim.width;
4743 config->image.height = streamInfo->dim.height;
4744 config->image.padding = 0;
4745 config->image.format = pbStreamFormat;
4746
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004747 uint32_t totalPlaneSize = 0;
4748
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004749 // Fill plane information.
4750 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4751 pbcamera::PlaneConfiguration plane;
4752 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4753 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4754 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004755
4756 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004757 }
4758
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004759 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004760 return OK;
4761}
4762
Thierry Strudel3d639192016-09-09 11:52:26 -07004763/*===========================================================================
4764 * FUNCTION : processCaptureRequest
4765 *
4766 * DESCRIPTION: process a capture request from camera service
4767 *
4768 * PARAMETERS :
4769 * @request : request from framework to process
 * @internallyRequestedStreams : streams requested internally by the HAL
 * (e.g. for HDR snapshot orchestration); may be empty
4770 *
4771 * RETURN :
4772 *
4773 *==========================================================================*/
4774int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004775 camera3_capture_request_t *request,
4776 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004777{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004778 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004779 int rc = NO_ERROR;
4780 int32_t request_id;
4781 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004782 bool isVidBufRequested = false;
4783 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004784 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004785
4786 pthread_mutex_lock(&mMutex);
4787
4788 // Validate current state
4789 switch (mState) {
4790 case CONFIGURED:
4791 case STARTED:
4792 /* valid state */
4793 break;
4794
4795 case ERROR:
4796 pthread_mutex_unlock(&mMutex);
4797 handleCameraDeviceError();
4798 return -ENODEV;
4799
4800 default:
4801 LOGE("Invalid state %d", mState);
4802 pthread_mutex_unlock(&mMutex);
4803 return -ENODEV;
4804 }
4805
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004806 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004807 if (rc != NO_ERROR) {
4808 LOGE("incoming request is not valid");
4809 pthread_mutex_unlock(&mMutex);
4810 return rc;
4811 }
4812
4813 meta = request->settings;
4814
4815 // For first capture request, send capture intent, and
4816 // stream on all streams
4817 if (mState == CONFIGURED) {
4818 // send an unconfigure to the backend so that the isp
4819 // resources are deallocated
4820 if (!mFirstConfiguration) {
4821 cam_stream_size_info_t stream_config_info;
4822 int32_t hal_version = CAM_HAL_V3;
4823 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4824 stream_config_info.buffer_info.min_buffers =
4825 MIN_INFLIGHT_REQUESTS;
4826 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004827 m_bIs4KVideo ? 0 :
4828 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004829 clear_metadata_buffer(mParameters);
4830 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4831 CAM_INTF_PARM_HAL_VERSION, hal_version);
4832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4833 CAM_INTF_META_STREAM_INFO, stream_config_info);
4834 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4835 mParameters);
4836 if (rc < 0) {
4837 LOGE("set_parms for unconfigure failed");
4838 pthread_mutex_unlock(&mMutex);
4839 return rc;
4840 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004841
Thierry Strudel3d639192016-09-09 11:52:26 -07004842 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004843 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004844 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004845 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004847 property_get("persist.camera.is_type", is_type_value, "4");
4848 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4849 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4850 property_get("persist.camera.is_type_preview", is_type_value, "4");
4851 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4852 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004853
4854 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4855 int32_t hal_version = CAM_HAL_V3;
4856 uint8_t captureIntent =
4857 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4858 mCaptureIntent = captureIntent;
4859 clear_metadata_buffer(mParameters);
4860 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4861 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4862 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004863 if (mFirstConfiguration) {
4864 // configure instant AEC
4865 // Instant AEC is a session based parameter and it is needed only
4866 // once per complete session after open camera.
4867 // i.e. This is set only once for the first capture request, after open camera.
4868 setInstantAEC(meta);
4869 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004870 uint8_t fwkVideoStabMode=0;
4871 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4872 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4873 }
4874
Xue Tuecac74e2017-04-17 13:58:15 -07004875 // If EIS setprop is enabled then only turn it on for video/preview
4876 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004877 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004878 int32_t vsMode;
4879 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4881 rc = BAD_VALUE;
4882 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004883 LOGD("setEis %d", setEis);
4884 bool eis3Supported = false;
4885 size_t count = IS_TYPE_MAX;
4886 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4887 for (size_t i = 0; i < count; i++) {
4888 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4889 eis3Supported = true;
4890 break;
4891 }
4892 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004893
4894 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004895 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004896 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4897 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004898 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4899 is_type = isTypePreview;
4900 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4901 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4902 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004904 } else {
4905 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004906 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004907 } else {
4908 is_type = IS_TYPE_NONE;
4909 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004911 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4913 }
4914 }
4915
4916 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4917 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4918
Thierry Strudel54dc9782017-02-15 12:12:10 -08004919 //Disable tintless only if the property is set to 0
4920 memset(prop, 0, sizeof(prop));
4921 property_get("persist.camera.tintless.enable", prop, "1");
4922 int32_t tintless_value = atoi(prop);
4923
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4925 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 //Disable CDS for HFR mode or if DIS/EIS is on.
4928 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4929 //after every configure_stream
4930 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4931 (m_bIsVideo)) {
4932 int32_t cds = CAM_CDS_MODE_OFF;
4933 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4934 CAM_INTF_PARM_CDS_MODE, cds))
4935 LOGE("Failed to disable CDS for HFR mode");
4936
4937 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938
4939 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4940 uint8_t* use_av_timer = NULL;
4941
4942 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004943 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004944 use_av_timer = &m_debug_avtimer;
4945 }
4946 else{
4947 use_av_timer =
4948 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004949 if (use_av_timer) {
4950 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4951 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004952 }
4953
4954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4955 rc = BAD_VALUE;
4956 }
4957 }
4958
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 setMobicat();
4960
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004961 uint8_t nrMode = 0;
4962 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4963 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4964 }
4965
Thierry Strudel3d639192016-09-09 11:52:26 -07004966 /* Set fps and hfr mode while sending meta stream info so that sensor
4967 * can configure appropriate streaming mode */
4968 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004969 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4970 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4972 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004973 if (rc == NO_ERROR) {
4974 int32_t max_fps =
4975 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004976 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004977 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4978 }
4979 /* For HFR, more buffers are dequeued upfront to improve the performance */
4980 if (mBatchSize) {
4981 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4982 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4983 }
4984 }
4985 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004986 LOGE("setHalFpsRange failed");
4987 }
4988 }
4989 if (meta.exists(ANDROID_CONTROL_MODE)) {
4990 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4991 rc = extractSceneMode(meta, metaMode, mParameters);
4992 if (rc != NO_ERROR) {
4993 LOGE("extractSceneMode failed");
4994 }
4995 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004996 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004997
Thierry Strudel04e026f2016-10-10 11:27:36 -07004998 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4999 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5000 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5001 rc = setVideoHdrMode(mParameters, vhdr);
5002 if (rc != NO_ERROR) {
5003 LOGE("setVideoHDR is failed");
5004 }
5005 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005007 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005008 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005009 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005010 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5011 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5012 sensorModeFullFov)) {
5013 rc = BAD_VALUE;
5014 }
5015 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005016 //TODO: validate the arguments, HSV scenemode should have only the
5017 //advertised fps ranges
5018
5019 /* Set the capture intent, HAL version, tintless, stream info,
5020 * and DIS enable parameters in the backend */
5021 LOGD("set_parms META_STREAM_INFO " );
5022 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005023 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5024 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005025 mStreamConfigInfo.type[i],
5026 mStreamConfigInfo.stream_sizes[i].width,
5027 mStreamConfigInfo.stream_sizes[i].height,
5028 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 mStreamConfigInfo.format[i],
5030 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005031 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005032
Thierry Strudel3d639192016-09-09 11:52:26 -07005033 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5034 mParameters);
5035 if (rc < 0) {
5036 LOGE("set_parms failed for hal version, stream info");
5037 }
5038
Chien-Yu Chenee335912017-02-09 17:53:20 -08005039 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
5040 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 if (rc != NO_ERROR) {
5042 LOGE("Failed to get sensor output size");
5043 pthread_mutex_unlock(&mMutex);
5044 goto error_exit;
5045 }
5046
5047 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5048 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08005049 mSensorModeInfo.active_array_size.width,
5050 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005051
5052 /* Set batchmode before initializing channel. Since registerBuffer
5053 * internally initializes some of the channels, better set batchmode
5054 * even before first register buffer */
5055 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5056 it != mStreamInfo.end(); it++) {
5057 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5058 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5059 && mBatchSize) {
5060 rc = channel->setBatchSize(mBatchSize);
5061 //Disable per frame map unmap for HFR/batchmode case
5062 rc |= channel->setPerFrameMapUnmap(false);
5063 if (NO_ERROR != rc) {
5064 LOGE("Channel init failed %d", rc);
5065 pthread_mutex_unlock(&mMutex);
5066 goto error_exit;
5067 }
5068 }
5069 }
5070
5071 //First initialize all streams
5072 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5073 it != mStreamInfo.end(); it++) {
5074 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005075
5076 /* Initial value of NR mode is needed before stream on */
5077 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005078 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5079 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005080 setEis) {
5081 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5082 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5083 is_type = mStreamConfigInfo.is_type[i];
5084 break;
5085 }
5086 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005087 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005088 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005089 rc = channel->initialize(IS_TYPE_NONE);
5090 }
5091 if (NO_ERROR != rc) {
5092 LOGE("Channel initialization failed %d", rc);
5093 pthread_mutex_unlock(&mMutex);
5094 goto error_exit;
5095 }
5096 }
5097
5098 if (mRawDumpChannel) {
5099 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5100 if (rc != NO_ERROR) {
5101 LOGE("Error: Raw Dump Channel init failed");
5102 pthread_mutex_unlock(&mMutex);
5103 goto error_exit;
5104 }
5105 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005106 if (mHdrPlusRawSrcChannel) {
5107 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5108 if (rc != NO_ERROR) {
5109 LOGE("Error: HDR+ RAW Source Channel init failed");
5110 pthread_mutex_unlock(&mMutex);
5111 goto error_exit;
5112 }
5113 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 if (mSupportChannel) {
5115 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5116 if (rc < 0) {
5117 LOGE("Support channel initialization failed");
5118 pthread_mutex_unlock(&mMutex);
5119 goto error_exit;
5120 }
5121 }
5122 if (mAnalysisChannel) {
5123 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5124 if (rc < 0) {
5125 LOGE("Analysis channel initialization failed");
5126 pthread_mutex_unlock(&mMutex);
5127 goto error_exit;
5128 }
5129 }
5130 if (mDummyBatchChannel) {
5131 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5132 if (rc < 0) {
5133 LOGE("mDummyBatchChannel setBatchSize failed");
5134 pthread_mutex_unlock(&mMutex);
5135 goto error_exit;
5136 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005137 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 if (rc < 0) {
5139 LOGE("mDummyBatchChannel initialization failed");
5140 pthread_mutex_unlock(&mMutex);
5141 goto error_exit;
5142 }
5143 }
5144
5145 // Set bundle info
5146 rc = setBundleInfo();
5147 if (rc < 0) {
5148 LOGE("setBundleInfo failed %d", rc);
5149 pthread_mutex_unlock(&mMutex);
5150 goto error_exit;
5151 }
5152
5153 //update settings from app here
5154 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5155 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5156 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5157 }
5158 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5159 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5160 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5161 }
5162 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5163 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5164 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5165
5166 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5167 (mLinkedCameraId != mCameraId) ) {
5168 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5169 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005170 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005171 goto error_exit;
5172 }
5173 }
5174
5175 // add bundle related cameras
5176 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5177 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005178 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5179 &m_pDualCamCmdPtr->bundle_info;
5180 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005181 if (mIsDeviceLinked)
5182 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5183 else
5184 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5185
5186 pthread_mutex_lock(&gCamLock);
5187
5188 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5189 LOGE("Dualcam: Invalid Session Id ");
5190 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005191 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005192 goto error_exit;
5193 }
5194
5195 if (mIsMainCamera == 1) {
5196 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5197 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005198 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005199 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 // related session id should be session id of linked session
5201 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5202 } else {
5203 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5204 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005205 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005206 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005207 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5208 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005209 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005210 pthread_mutex_unlock(&gCamLock);
5211
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005212 rc = mCameraHandle->ops->set_dual_cam_cmd(
5213 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005214 if (rc < 0) {
5215 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005216 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005217 goto error_exit;
5218 }
5219 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005220 goto no_error;
5221error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005222 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005223 return rc;
5224no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 mWokenUpByDaemon = false;
5226 mPendingLiveRequest = 0;
5227 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005228 }
5229
5230 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005231 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232
5233 if (mFlushPerf) {
5234 //we cannot accept any requests during flush
5235 LOGE("process_capture_request cannot proceed during flush");
5236 pthread_mutex_unlock(&mMutex);
5237 return NO_ERROR; //should return an error
5238 }
5239
5240 if (meta.exists(ANDROID_REQUEST_ID)) {
5241 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5242 mCurrentRequestId = request_id;
5243 LOGD("Received request with id: %d", request_id);
5244 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5245 LOGE("Unable to find request id field, \
5246 & no previous id available");
5247 pthread_mutex_unlock(&mMutex);
5248 return NAME_NOT_FOUND;
5249 } else {
5250 LOGD("Re-using old request id");
5251 request_id = mCurrentRequestId;
5252 }
5253
5254 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5255 request->num_output_buffers,
5256 request->input_buffer,
5257 frameNumber);
5258 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005259 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005260 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005261 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 uint32_t snapshotStreamId = 0;
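    // Walk the output buffers: wait on acquire fences, note whether a JPEG
    // (BLOB) or depth output is requested, and collect the stream IDs the
    // backend must fill for this frame.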
5263 for (size_t i = 0; i < request->num_output_buffers; i++) {
5264 const camera3_stream_buffer_t& output = request->output_buffers[i];
5265 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5266
Emilian Peev7650c122017-01-19 08:24:33 -08005267 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5268 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005269 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005270 blob_request = 1;
5271 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5272 }
5273
5274 if (output.acquire_fence != -1) {
5275 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5276 close(output.acquire_fence);
5277 if (rc != OK) {
5278 LOGE("sync wait failed %d", rc);
5279 pthread_mutex_unlock(&mMutex);
5280 return rc;
5281 }
5282 }
5283
Emilian Peev0f3c3162017-03-15 12:57:46 +00005284 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5285 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005286 depthRequestPresent = true;
5287 continue;
5288 }
5289
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005290 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005291 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005292
5293 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5294 isVidBufRequested = true;
5295 }
5296 }
5297
5298    // FIXME: Add checks in validateCaptureRequest to ensure there are no duplicates.
5299 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5300 itr++) {
5301 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5302 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5303 channel->getStreamID(channel->getStreamTypeMask());
5304
5305 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5306 isVidBufRequested = true;
5307 }
5308 }
5309
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005311 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005312 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 }
5314 if (blob_request && mRawDumpChannel) {
5315 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005316 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005317 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319 }
5320
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 {
5322 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5323 // Request a RAW buffer if
5324 // 1. mHdrPlusRawSrcChannel is valid.
5325        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5326 // 3. There is no pending HDR+ request.
5327 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5328 mHdrPlusPendingRequests.size() == 0) {
5329 streamsArray.stream_request[streamsArray.num_streams].streamID =
5330 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5331 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5332 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005333 }
5334
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005335 //extract capture intent
5336 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5337 mCaptureIntent =
5338 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5339 }
5340
5341 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5342 mCacMode =
5343 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5344 }
5345
5346 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005347 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005348
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005349 {
5350 Mutex::Autolock l(gHdrPlusClientLock);
5351 // If this request has a still capture intent, try to submit an HDR+ request.
5352 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5353 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5354 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5355 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005356 }
5357
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005358 if (hdrPlusRequest) {
5359 // For a HDR+ request, just set the frame parameters.
5360 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5361 if (rc < 0) {
5362 LOGE("fail to set frame parameters");
5363 pthread_mutex_unlock(&mMutex);
5364 return rc;
5365 }
5366 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005367 /* Parse the settings:
5368 * - For every request in NORMAL MODE
5369 * - For every request in HFR mode during preview only case
5370 * - For first request of every batch in HFR mode during video
5371 * recording. In batchmode the same settings except frame number is
5372 * repeated in each request of the batch.
5373 */
5374 if (!mBatchSize ||
5375 (mBatchSize && !isVidBufRequested) ||
5376 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 if (rc < 0) {
5379 LOGE("fail to set frame parameters");
5380 pthread_mutex_unlock(&mMutex);
5381 return rc;
5382 }
5383 }
5384        /* For batch-mode HFR, setFrameParameters is not called for every
5385         * request; only the frame number of the latest request is parsed.
5386         * Keep track of the first and last frame numbers in a batch so that
5387         * metadata for all frame numbers of the batch can be duplicated in
5388         * handleBatchMetadata */
5389 if (mBatchSize) {
5390 if (!mToBeQueuedVidBufs) {
5391 //start of the batch
5392 mFirstFrameNumberInBatch = request->frame_number;
5393 }
5394 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5395 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5396 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005397 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005398 return BAD_VALUE;
5399 }
5400 }
5401 if (mNeedSensorRestart) {
5402 /* Unlock the mutex as restartSensor waits on the channels to be
5403 * stopped, which in turn calls stream callback functions -
5404 * handleBufferWithLock and handleMetadataWithLock */
5405 pthread_mutex_unlock(&mMutex);
5406 rc = dynamicUpdateMetaStreamInfo();
5407 if (rc != NO_ERROR) {
5408 LOGE("Restarting the sensor failed");
5409 return BAD_VALUE;
5410 }
5411 mNeedSensorRestart = false;
5412 pthread_mutex_lock(&mMutex);
5413 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005414 if(mResetInstantAEC) {
5415 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5416 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5417 mResetInstantAEC = false;
5418 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005419 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005420 if (request->input_buffer->acquire_fence != -1) {
5421 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5422 close(request->input_buffer->acquire_fence);
5423 if (rc != OK) {
5424 LOGE("input buffer sync wait failed %d", rc);
5425 pthread_mutex_unlock(&mMutex);
5426 return rc;
5427 }
5428 }
5429 }
5430
5431 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5432 mLastCustIntentFrmNum = frameNumber;
5433 }
5434 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005435 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 pendingRequestIterator latestRequest;
5437 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005438 pendingRequest.num_buffers = depthRequestPresent ?
5439 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005440 pendingRequest.request_id = request_id;
5441 pendingRequest.blob_request = blob_request;
5442 pendingRequest.timestamp = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 if (request->input_buffer) {
5444 pendingRequest.input_buffer =
5445 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5446 *(pendingRequest.input_buffer) = *(request->input_buffer);
5447 pInputBuffer = pendingRequest.input_buffer;
5448 } else {
5449 pendingRequest.input_buffer = NULL;
5450 pInputBuffer = NULL;
5451 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005452 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005453
5454 pendingRequest.pipeline_depth = 0;
5455 pendingRequest.partial_result_cnt = 0;
5456 extractJpegMetadata(mCurJpegMeta, request);
5457 pendingRequest.jpegMetadata = mCurJpegMeta;
5458 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5459 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005460 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005461 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5462 mHybridAeEnable =
5463 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5464 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005465
5466 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5467 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005468 /* DevCamDebug metadata processCaptureRequest */
5469 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5470 mDevCamDebugMetaEnable =
5471 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5472 }
5473 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5474 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005475
5476 //extract CAC info
5477 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5478 mCacMode =
5479 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5480 }
5481 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005482 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005483
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005484 // extract enableZsl info
5485 if (gExposeEnableZslKey) {
5486 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5487 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5488 mZslEnabled = pendingRequest.enableZsl;
5489 } else {
5490 pendingRequest.enableZsl = mZslEnabled;
5491 }
5492 }
5493
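    // Record every output buffer of this request in mPendingBuffersMap so it
    // can be returned to the framework (or flagged as an error) on flush or
    // device-error handling.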
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 PendingBuffersInRequest bufsForCurRequest;
5495 bufsForCurRequest.frame_number = frameNumber;
5496 // Mark current timestamp for the new request
5497 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005498 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005499
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005500 if (hdrPlusRequest) {
5501 // Save settings for this request.
5502 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5503 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5504
5505 // Add to pending HDR+ request queue.
5506 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5507 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5508
5509 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5510 }
5511
Thierry Strudel3d639192016-09-09 11:52:26 -07005512 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005513 if ((request->output_buffers[i].stream->data_space ==
5514 HAL_DATASPACE_DEPTH) &&
5515 (HAL_PIXEL_FORMAT_BLOB ==
5516 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005517 continue;
5518 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005519 RequestedBufferInfo requestedBuf;
5520 memset(&requestedBuf, 0, sizeof(requestedBuf));
5521 requestedBuf.stream = request->output_buffers[i].stream;
5522 requestedBuf.buffer = NULL;
5523 pendingRequest.buffers.push_back(requestedBuf);
5524
5525        // Add the buffer handle to the pending buffers list
5526 PendingBufferInfo bufferInfo;
5527 bufferInfo.buffer = request->output_buffers[i].buffer;
5528 bufferInfo.stream = request->output_buffers[i].stream;
5529 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5530 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5531 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5532 frameNumber, bufferInfo.buffer,
5533 channel->getStreamTypeMask(), bufferInfo.stream->format);
5534 }
5535 // Add this request packet into mPendingBuffersMap
5536 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5537 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5538 mPendingBuffersMap.get_num_overall_buffers());
5539
5540 latestRequest = mPendingRequestsList.insert(
5541 mPendingRequestsList.end(), pendingRequest);
5542 if(mFlush) {
5543 LOGI("mFlush is true");
5544 pthread_mutex_unlock(&mMutex);
5545 return NO_ERROR;
5546 }
5547
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005548 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5549 // channel.
5550 if (!hdrPlusRequest) {
5551 int indexUsed;
5552 // Notify metadata channel we receive a request
5553 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005554
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005555 if(request->input_buffer != NULL){
5556 LOGD("Input request, frame_number %d", frameNumber);
5557 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5558 if (NO_ERROR != rc) {
5559 LOGE("fail to set reproc parameters");
5560 pthread_mutex_unlock(&mMutex);
5561 return rc;
5562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005563 }
5564
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005565 // Call request on other streams
5566 uint32_t streams_need_metadata = 0;
5567 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5568 for (size_t i = 0; i < request->num_output_buffers; i++) {
5569 const camera3_stream_buffer_t& output = request->output_buffers[i];
5570 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5571
5572 if (channel == NULL) {
5573 LOGW("invalid channel pointer for stream");
5574 continue;
5575 }
5576
5577 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5578 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5579 output.buffer, request->input_buffer, frameNumber);
5580 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005581 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005582 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5583 if (rc < 0) {
5584 LOGE("Fail to request on picture channel");
5585 pthread_mutex_unlock(&mMutex);
5586 return rc;
5587 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005588 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005589 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5590 assert(NULL != mDepthChannel);
5591 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005592
Emilian Peev7650c122017-01-19 08:24:33 -08005593 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5594 if (rc < 0) {
5595 LOGE("Fail to map on depth buffer");
5596 pthread_mutex_unlock(&mMutex);
5597 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005598 }
Emilian Peev7650c122017-01-19 08:24:33 -08005599 } else {
5600 LOGD("snapshot request with buffer %p, frame_number %d",
5601 output.buffer, frameNumber);
5602 if (!request->settings) {
5603 rc = channel->request(output.buffer, frameNumber,
5604 NULL, mPrevParameters, indexUsed);
5605 } else {
5606 rc = channel->request(output.buffer, frameNumber,
5607 NULL, mParameters, indexUsed);
5608 }
5609 if (rc < 0) {
5610 LOGE("Fail to request on picture channel");
5611 pthread_mutex_unlock(&mMutex);
5612 return rc;
5613 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005614
Emilian Peev7650c122017-01-19 08:24:33 -08005615 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5616 uint32_t j = 0;
5617 for (j = 0; j < streamsArray.num_streams; j++) {
5618 if (streamsArray.stream_request[j].streamID == streamId) {
5619 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5620 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5621 else
5622 streamsArray.stream_request[j].buf_index = indexUsed;
5623 break;
5624 }
5625 }
5626 if (j == streamsArray.num_streams) {
5627 LOGE("Did not find matching stream to update index");
5628 assert(0);
5629 }
5630
5631 pendingBufferIter->need_metadata = true;
5632 streams_need_metadata++;
5633 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005635 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5636 bool needMetadata = false;
5637 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5638 rc = yuvChannel->request(output.buffer, frameNumber,
5639 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5640 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005641 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005642 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005643 pthread_mutex_unlock(&mMutex);
5644 return rc;
5645 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005646
5647 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5648 uint32_t j = 0;
5649 for (j = 0; j < streamsArray.num_streams; j++) {
5650 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005651 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5652 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5653 else
5654 streamsArray.stream_request[j].buf_index = indexUsed;
5655 break;
5656 }
5657 }
5658 if (j == streamsArray.num_streams) {
5659 LOGE("Did not find matching stream to update index");
5660 assert(0);
5661 }
5662
5663 pendingBufferIter->need_metadata = needMetadata;
5664 if (needMetadata)
5665 streams_need_metadata += 1;
5666 LOGD("calling YUV channel request, need_metadata is %d",
5667 needMetadata);
5668 } else {
5669 LOGD("request with buffer %p, frame_number %d",
5670 output.buffer, frameNumber);
5671
5672 rc = channel->request(output.buffer, frameNumber, indexUsed);
5673
5674 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5675 uint32_t j = 0;
5676 for (j = 0; j < streamsArray.num_streams; j++) {
5677 if (streamsArray.stream_request[j].streamID == streamId) {
5678 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5679 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5680 else
5681 streamsArray.stream_request[j].buf_index = indexUsed;
5682 break;
5683 }
5684 }
5685 if (j == streamsArray.num_streams) {
5686 LOGE("Did not find matching stream to update index");
5687 assert(0);
5688 }
5689
5690 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5691 && mBatchSize) {
5692 mToBeQueuedVidBufs++;
5693 if (mToBeQueuedVidBufs == mBatchSize) {
5694 channel->queueBatchBuf();
5695 }
5696 }
5697 if (rc < 0) {
5698 LOGE("request failed");
5699 pthread_mutex_unlock(&mMutex);
5700 return rc;
5701 }
5702 }
5703 pendingBufferIter++;
5704 }
5705
5706 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5707 itr++) {
5708 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5709
5710 if (channel == NULL) {
5711 LOGE("invalid channel pointer for stream");
5712 assert(0);
5713 return BAD_VALUE;
5714 }
5715
5716 InternalRequest requestedStream;
5717 requestedStream = (*itr);
5718
5719
5720 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5721 LOGD("snapshot request internally input buffer %p, frame_number %d",
5722 request->input_buffer, frameNumber);
5723 if(request->input_buffer != NULL){
5724 rc = channel->request(NULL, frameNumber,
5725 pInputBuffer, &mReprocMeta, indexUsed, true,
5726 requestedStream.meteringOnly);
5727 if (rc < 0) {
5728 LOGE("Fail to request on picture channel");
5729 pthread_mutex_unlock(&mMutex);
5730 return rc;
5731 }
5732 } else {
5733 LOGD("snapshot request with frame_number %d", frameNumber);
5734 if (!request->settings) {
5735 rc = channel->request(NULL, frameNumber,
5736 NULL, mPrevParameters, indexUsed, true,
5737 requestedStream.meteringOnly);
5738 } else {
5739 rc = channel->request(NULL, frameNumber,
5740 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5741 }
5742 if (rc < 0) {
5743 LOGE("Fail to request on picture channel");
5744 pthread_mutex_unlock(&mMutex);
5745 return rc;
5746 }
5747
5748 if ((*itr).meteringOnly != 1) {
5749 requestedStream.need_metadata = 1;
5750 streams_need_metadata++;
5751 }
5752 }
5753
5754 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5755 uint32_t j = 0;
5756 for (j = 0; j < streamsArray.num_streams; j++) {
5757 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005758 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5759 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5760 else
5761 streamsArray.stream_request[j].buf_index = indexUsed;
5762 break;
5763 }
5764 }
5765 if (j == streamsArray.num_streams) {
5766 LOGE("Did not find matching stream to update index");
5767 assert(0);
5768 }
5769
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005770 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005771 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005772 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005773 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005774 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005775 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005776 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005777
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005778 //If 2 streams have need_metadata set to true, fail the request, unless
5779 //we copy/reference count the metadata buffer
5780 if (streams_need_metadata > 1) {
5781            LOGE("not supporting request in which two streams require"
5782                    " 2 HAL metadata buffers for reprocessing");
5783 pthread_mutex_unlock(&mMutex);
5784 return -EINVAL;
5785 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005786
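    // Enable PDAF raw data in the backend only when this capture request
    // includes a depth (HAL_DATASPACE_DEPTH) output buffer.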
Emilian Peev7650c122017-01-19 08:24:33 -08005787 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5789 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5790 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5791 pthread_mutex_unlock(&mMutex);
5792 return BAD_VALUE;
5793 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005794 if (request->input_buffer == NULL) {
5795 /* Set the parameters to backend:
5796 * - For every request in NORMAL MODE
5797 * - For every request in HFR mode during preview only case
5798 * - Once every batch in HFR mode during video recording
5799 */
5800 if (!mBatchSize ||
5801 (mBatchSize && !isVidBufRequested) ||
5802 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5803 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5804 mBatchSize, isVidBufRequested,
5805 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005806
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005807 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
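                // Batch is complete: merge this request's stream IDs into
                // mBatchedStreamsArray (the union across the batch) so a single
                // set_parms call below covers every stream requested in the batch.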
5808 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5809 uint32_t m = 0;
5810 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5811 if (streamsArray.stream_request[k].streamID ==
5812 mBatchedStreamsArray.stream_request[m].streamID)
5813 break;
5814 }
5815 if (m == mBatchedStreamsArray.num_streams) {
5816 mBatchedStreamsArray.stream_request\
5817 [mBatchedStreamsArray.num_streams].streamID =
5818 streamsArray.stream_request[k].streamID;
5819 mBatchedStreamsArray.stream_request\
5820 [mBatchedStreamsArray.num_streams].buf_index =
5821 streamsArray.stream_request[k].buf_index;
5822 mBatchedStreamsArray.num_streams =
5823 mBatchedStreamsArray.num_streams + 1;
5824 }
5825 }
5826 streamsArray = mBatchedStreamsArray;
5827 }
5828 /* Update stream id of all the requested buffers */
5829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5830 streamsArray)) {
5831 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005832 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005833 return BAD_VALUE;
5834 }
5835
5836 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5837 mParameters);
5838 if (rc < 0) {
5839 LOGE("set_parms failed");
5840 }
5841            /* reset to zero because the batch is queued */
5842 mToBeQueuedVidBufs = 0;
5843 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5844 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5845 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
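            // Batch is not yet full: only accumulate this request's stream IDs;
            // parameters are sent to the backend once the batch completes.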
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005846 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5847 uint32_t m = 0;
5848 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5849 if (streamsArray.stream_request[k].streamID ==
5850 mBatchedStreamsArray.stream_request[m].streamID)
5851 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005852 }
5853 if (m == mBatchedStreamsArray.num_streams) {
5854 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5855 streamID = streamsArray.stream_request[k].streamID;
5856 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5857 buf_index = streamsArray.stream_request[k].buf_index;
5858 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5859 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005860 }
5861 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005862 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005863
5864 // Start all streams after the first setting is sent, so that the
5865 // setting can be applied sooner: (0 + apply_delay)th frame.
5866 if (mState == CONFIGURED && mChannelHandle) {
5867 //Then start them.
5868 LOGH("Start META Channel");
5869 rc = mMetadataChannel->start();
5870 if (rc < 0) {
5871 LOGE("META channel start failed");
5872 pthread_mutex_unlock(&mMutex);
5873 return rc;
5874 }
5875
5876 if (mAnalysisChannel) {
5877 rc = mAnalysisChannel->start();
5878 if (rc < 0) {
5879 LOGE("Analysis channel start failed");
5880 mMetadataChannel->stop();
5881 pthread_mutex_unlock(&mMutex);
5882 return rc;
5883 }
5884 }
5885
5886 if (mSupportChannel) {
5887 rc = mSupportChannel->start();
5888 if (rc < 0) {
5889 LOGE("Support channel start failed");
5890 mMetadataChannel->stop();
5891                        /* Although support and analysis are mutually exclusive today,
5892                           adding it in any case for future proofing */
5893 if (mAnalysisChannel) {
5894 mAnalysisChannel->stop();
5895 }
5896 pthread_mutex_unlock(&mMutex);
5897 return rc;
5898 }
5899 }
5900 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5901 it != mStreamInfo.end(); it++) {
5902 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5903 LOGH("Start Processing Channel mask=%d",
5904 channel->getStreamTypeMask());
5905 rc = channel->start();
5906 if (rc < 0) {
5907 LOGE("channel start failed");
5908 pthread_mutex_unlock(&mMutex);
5909 return rc;
5910 }
5911 }
5912
5913 if (mRawDumpChannel) {
5914 LOGD("Starting raw dump stream");
5915 rc = mRawDumpChannel->start();
5916 if (rc != NO_ERROR) {
5917 LOGE("Error Starting Raw Dump Channel");
5918 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5919 it != mStreamInfo.end(); it++) {
5920 QCamera3Channel *channel =
5921 (QCamera3Channel *)(*it)->stream->priv;
5922 LOGH("Stopping Processing Channel mask=%d",
5923 channel->getStreamTypeMask());
5924 channel->stop();
5925 }
5926 if (mSupportChannel)
5927 mSupportChannel->stop();
5928 if (mAnalysisChannel) {
5929 mAnalysisChannel->stop();
5930 }
5931 mMetadataChannel->stop();
5932 pthread_mutex_unlock(&mMutex);
5933 return rc;
5934 }
5935 }
5936
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005937 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005938 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005939 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005940 if (rc != NO_ERROR) {
5941 LOGE("start_channel failed %d", rc);
5942 pthread_mutex_unlock(&mMutex);
5943 return rc;
5944 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07005945
5946 {
5947 // Configure Easel for stream on.
5948 Mutex::Autolock l(gHdrPlusClientLock);
5949 if (EaselManagerClientOpened) {
5950 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
5951 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
5952 if (rc != OK) {
5953 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
5954 mCameraId, mSensorModeInfo.op_pixel_clk);
5955 pthread_mutex_unlock(&mMutex);
5956 return rc;
5957 }
5958 }
5959 }
5960
5961 // Start sensor streaming.
5962 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
5963 mChannelHandle);
5964 if (rc != NO_ERROR) {
5965 LOGE("start_sensor_stream_on failed %d", rc);
5966 pthread_mutex_unlock(&mMutex);
5967 return rc;
5968 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005969 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005970 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005971 }
5972
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07005973 // Enable HDR+ mode for the first PREVIEW_INTENT request.
5974 {
5975 Mutex::Autolock l(gHdrPlusClientLock);
5976 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5977 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5978 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5979 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5980 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5981 rc = enableHdrPlusModeLocked();
5982 if (rc != OK) {
5983 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
5984 pthread_mutex_unlock(&mMutex);
5985 return rc;
5986 }
5987
5988 mFirstPreviewIntentSeen = true;
5989 }
5990 }
5991
Thierry Strudel3d639192016-09-09 11:52:26 -07005992 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5993
5994 mState = STARTED;
5995 // Added a timed condition wait
5996 struct timespec ts;
5997 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005998 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005999 if (rc < 0) {
6000 isValidTimeout = 0;
6001        LOGE("Error reading the monotonic clock!!");
6002 }
6003 else {
6004        // Make the timeout 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006005 int64_t timeout = 5;
6006 {
6007 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6008 // If there is a pending HDR+ request, the following requests may be blocked until the
6009 // HDR+ request is done. So allow a longer timeout.
6010 if (mHdrPlusPendingRequests.size() > 0) {
6011 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6012 }
6013 }
6014 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006015 }
6016 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006017 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006018 (mState != ERROR) && (mState != DEINIT)) {
6019 if (!isValidTimeout) {
6020 LOGD("Blocking on conditional wait");
6021 pthread_cond_wait(&mRequestCond, &mMutex);
6022 }
6023 else {
6024 LOGD("Blocking on timed conditional wait");
6025 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6026 if (rc == ETIMEDOUT) {
6027 rc = -ENODEV;
6028 LOGE("Unblocked on timeout!!!!");
6029 break;
6030 }
6031 }
6032 LOGD("Unblocked");
6033 if (mWokenUpByDaemon) {
6034 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006035 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006036 break;
6037 }
6038 }
6039 pthread_mutex_unlock(&mMutex);
6040
6041 return rc;
6042}
6043
6044/*===========================================================================
6045 * FUNCTION : dump
6046 *
6047 * DESCRIPTION: Dump HAL state (pending requests, buffers, frame drops) to fd
6048 *
6049 * PARAMETERS :
6050 *   @fd      : file descriptor to write the dump output to
6051 *
6052 * RETURN     : None
6053 *==========================================================================*/
6054void QCamera3HardwareInterface::dump(int fd)
6055{
6056 pthread_mutex_lock(&mMutex);
6057 dprintf(fd, "\n Camera HAL3 information Begin \n");
6058
6059 dprintf(fd, "\nNumber of pending requests: %zu \n",
6060 mPendingRequestsList.size());
6061 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6062 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6063 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6064 for(pendingRequestIterator i = mPendingRequestsList.begin();
6065 i != mPendingRequestsList.end(); i++) {
6066 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6067 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6068 i->input_buffer);
6069 }
6070 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6071 mPendingBuffersMap.get_num_overall_buffers());
6072 dprintf(fd, "-------+------------------\n");
6073 dprintf(fd, " Frame | Stream type mask \n");
6074 dprintf(fd, "-------+------------------\n");
6075 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6076 for(auto &j : req.mPendingBufferList) {
6077 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6078 dprintf(fd, " %5d | %11d \n",
6079 req.frame_number, channel->getStreamTypeMask());
6080 }
6081 }
6082 dprintf(fd, "-------+------------------\n");
6083
6084 dprintf(fd, "\nPending frame drop list: %zu\n",
6085 mPendingFrameDropList.size());
6086 dprintf(fd, "-------+-----------\n");
6087 dprintf(fd, " Frame | Stream ID \n");
6088 dprintf(fd, "-------+-----------\n");
6089 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6090 i != mPendingFrameDropList.end(); i++) {
6091 dprintf(fd, " %5d | %9d \n",
6092 i->frame_number, i->stream_ID);
6093 }
6094 dprintf(fd, "-------+-----------\n");
6095
6096 dprintf(fd, "\n Camera HAL3 information End \n");
6097
6098 /* use dumpsys media.camera as trigger to send update debug level event */
6099 mUpdateDebugLevel = true;
6100 pthread_mutex_unlock(&mMutex);
6101 return;
6102}
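// Note: this dump is normally reached through the camera service's dump path,
// e.g. `adb shell dumpsys media.camera`, which is also the trigger the
// mUpdateDebugLevel flag above relies on.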
6103
6104/*===========================================================================
6105 * FUNCTION : flush
6106 *
6107 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6108 * conditionally restarts channels
6109 *
6110 * PARAMETERS :
6111 * @ restartChannels: re-start all channels
6112 *
6113 *
6114 * RETURN :
6115 * 0 on success
6116 * Error code on failure
6117 *==========================================================================*/
6118int QCamera3HardwareInterface::flush(bool restartChannels)
6119{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006120 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006121 int32_t rc = NO_ERROR;
6122
6123 LOGD("Unblocking Process Capture Request");
6124 pthread_mutex_lock(&mMutex);
6125 mFlush = true;
6126 pthread_mutex_unlock(&mMutex);
6127
6128 rc = stopAllChannels();
6129 // unlink of dualcam
6130 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006131 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6132 &m_pDualCamCmdPtr->bundle_info;
6133 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6135 pthread_mutex_lock(&gCamLock);
6136
6137 if (mIsMainCamera == 1) {
6138 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6139 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006140 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006141 // related session id should be session id of linked session
6142 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6143 } else {
6144 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6145 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006146 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006147 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6148 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006149 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006150 pthread_mutex_unlock(&gCamLock);
6151
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006152 rc = mCameraHandle->ops->set_dual_cam_cmd(
6153 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006154 if (rc < 0) {
6155 LOGE("Dualcam: Unlink failed, but still proceed to close");
6156 }
6157 }
6158
6159 if (rc < 0) {
6160 LOGE("stopAllChannels failed");
6161 return rc;
6162 }
6163 if (mChannelHandle) {
6164 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6165 mChannelHandle);
6166 }
6167
6168 // Reset bundle info
6169 rc = setBundleInfo();
6170 if (rc < 0) {
6171 LOGE("setBundleInfo failed %d", rc);
6172 return rc;
6173 }
6174
6175 // Mutex Lock
6176 pthread_mutex_lock(&mMutex);
6177
6178 // Unblock process_capture_request
6179 mPendingLiveRequest = 0;
6180 pthread_cond_signal(&mRequestCond);
6181
6182 rc = notifyErrorForPendingRequests();
6183 if (rc < 0) {
6184 LOGE("notifyErrorForPendingRequests failed");
6185 pthread_mutex_unlock(&mMutex);
6186 return rc;
6187 }
6188
6189 mFlush = false;
6190
6191 // Start the Streams/Channels
6192 if (restartChannels) {
6193 rc = startAllChannels();
6194 if (rc < 0) {
6195 LOGE("startAllChannels failed");
6196 pthread_mutex_unlock(&mMutex);
6197 return rc;
6198 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006199 if (mChannelHandle) {
6200 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006201 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006202 if (rc < 0) {
6203 LOGE("start_channel failed");
6204 pthread_mutex_unlock(&mMutex);
6205 return rc;
6206 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006207 }
6208 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006209 pthread_mutex_unlock(&mMutex);
6210
6211 return 0;
6212}
6213
6214/*===========================================================================
6215 * FUNCTION : flushPerf
6216 *
6217 * DESCRIPTION: This is the performance optimization version of flush that does
6218 *              not use stream off; instead it flushes the backend pipeline
6219 *
6220 * PARAMETERS :
6221 *
6222 *
6223 * RETURN : 0 : success
6224 * -EINVAL: input is malformed (device is not valid)
6225 * -ENODEV: if the device has encountered a serious error
6226 *==========================================================================*/
6227int QCamera3HardwareInterface::flushPerf()
6228{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006229 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006230 int32_t rc = 0;
6231 struct timespec timeout;
6232 bool timed_wait = false;
6233
6234 pthread_mutex_lock(&mMutex);
6235 mFlushPerf = true;
6236 mPendingBuffersMap.numPendingBufsAtFlush =
6237 mPendingBuffersMap.get_num_overall_buffers();
6238 LOGD("Calling flush. Wait for %d buffers to return",
6239 mPendingBuffersMap.numPendingBufsAtFlush);
6240
6241 /* send the flush event to the backend */
6242 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6243 if (rc < 0) {
6244 LOGE("Error in flush: IOCTL failure");
6245 mFlushPerf = false;
6246 pthread_mutex_unlock(&mMutex);
6247 return -ENODEV;
6248 }
6249
6250 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6251 LOGD("No pending buffers in HAL, return flush");
6252 mFlushPerf = false;
6253 pthread_mutex_unlock(&mMutex);
6254 return rc;
6255 }
6256
6257 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006258 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006259 if (rc < 0) {
6260        LOGE("Error reading the monotonic clock, cannot use timed wait");
6261 } else {
6262 timeout.tv_sec += FLUSH_TIMEOUT;
6263 timed_wait = true;
6264 }
6265
6266 //Block on conditional variable
6267 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6268 LOGD("Waiting on mBuffersCond");
6269 if (!timed_wait) {
6270 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6271 if (rc != 0) {
6272 LOGE("pthread_cond_wait failed due to rc = %s",
6273 strerror(rc));
6274 break;
6275 }
6276 } else {
6277 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6278 if (rc != 0) {
6279 LOGE("pthread_cond_timedwait failed due to rc = %s",
6280 strerror(rc));
6281 break;
6282 }
6283 }
6284 }
6285 if (rc != 0) {
6286 mFlushPerf = false;
6287 pthread_mutex_unlock(&mMutex);
6288 return -ENODEV;
6289 }
6290
6291 LOGD("Received buffers, now safe to return them");
6292
6293 //make sure the channels handle flush
6294 //currently only required for the picture channel to release snapshot resources
6295 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6296 it != mStreamInfo.end(); it++) {
6297 QCamera3Channel *channel = (*it)->channel;
6298 if (channel) {
6299 rc = channel->flush();
6300 if (rc) {
6301 LOGE("Flushing the channels failed with error %d", rc);
6302                // Even though the channel flush failed, we need to continue and
6303                // return the buffers we have to the framework; however, the return
6304                // value will be an error.
6305 rc = -ENODEV;
6306 }
6307 }
6308 }
6309
6310 /* notify the frameworks and send errored results */
6311 rc = notifyErrorForPendingRequests();
6312 if (rc < 0) {
6313 LOGE("notifyErrorForPendingRequests failed");
6314 pthread_mutex_unlock(&mMutex);
6315 return rc;
6316 }
6317
6318 //unblock process_capture_request
6319 mPendingLiveRequest = 0;
6320 unblockRequestIfNecessary();
6321
6322 mFlushPerf = false;
6323 pthread_mutex_unlock(&mMutex);
6324 LOGD ("Flush Operation complete. rc = %d", rc);
6325 return rc;
6326}
6327
6328/*===========================================================================
6329 * FUNCTION : handleCameraDeviceError
6330 *
6331 * DESCRIPTION: This function calls internal flush and notifies the error to
6332 * framework and updates the state variable.
6333 *
6334 * PARAMETERS : None
6335 *
6336 * RETURN : NO_ERROR on Success
6337 * Error code on failure
6338 *==========================================================================*/
6339int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6340{
6341 int32_t rc = NO_ERROR;
6342
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006343 {
6344 Mutex::Autolock lock(mFlushLock);
6345 pthread_mutex_lock(&mMutex);
6346 if (mState != ERROR) {
6347 //if mState != ERROR, nothing to be done
6348 pthread_mutex_unlock(&mMutex);
6349 return NO_ERROR;
6350 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006351 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006352
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006353 rc = flush(false /* restart channels */);
6354 if (NO_ERROR != rc) {
6355 LOGE("internal flush to handle mState = ERROR failed");
6356 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006357
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006358 pthread_mutex_lock(&mMutex);
6359 mState = DEINIT;
6360 pthread_mutex_unlock(&mMutex);
6361 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006362
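    // After the internal flush, report a fatal device error to the framework
    // so it can close (and possibly reopen) the camera device.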
6363 camera3_notify_msg_t notify_msg;
6364 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6365 notify_msg.type = CAMERA3_MSG_ERROR;
6366 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6367 notify_msg.message.error.error_stream = NULL;
6368 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006369 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006370
6371 return rc;
6372}
6373
6374/*===========================================================================
6375 * FUNCTION : captureResultCb
6376 *
6377 * DESCRIPTION: Callback handler for all capture result
6378 * (streams, as well as metadata)
6379 *
6380 * PARAMETERS :
6381 * @metadata : metadata information
6382 * @buffer : actual gralloc buffer to be returned to frameworks.
6383 * NULL if metadata.
6384 *
6385 * RETURN : NONE
6386 *==========================================================================*/
6387void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6388 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6389{
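    // Routing overview: metadata buffers go to the batch-aware or regular
    // metadata handlers, while input-buffer completions and output buffers are
    // each handled under mMutex by their dedicated handlers below.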
6390 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006391 pthread_mutex_lock(&mMutex);
6392 uint8_t batchSize = mBatchSize;
6393 pthread_mutex_unlock(&mMutex);
6394 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 handleBatchMetadata(metadata_buf,
6396 true /* free_and_bufdone_meta_buf */);
6397 } else { /* mBatchSize = 0 */
6398 hdrPlusPerfLock(metadata_buf);
6399 pthread_mutex_lock(&mMutex);
6400 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006401 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006402 true /* last urgent frame of batch metadata */,
6403 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006404 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006405 pthread_mutex_unlock(&mMutex);
6406 }
6407 } else if (isInputBuffer) {
6408 pthread_mutex_lock(&mMutex);
6409 handleInputBufferWithLock(frame_number);
6410 pthread_mutex_unlock(&mMutex);
6411 } else {
6412 pthread_mutex_lock(&mMutex);
6413 handleBufferWithLock(buffer, frame_number);
6414 pthread_mutex_unlock(&mMutex);
6415 }
6416 return;
6417}
6418
6419/*===========================================================================
6420 * FUNCTION : getReprocessibleOutputStreamId
6421 *
6422 * DESCRIPTION: Get source output stream id for the input reprocess stream
6423 * based on size and format, which would be the largest
6424 * output stream if an input stream exists.
6425 *
6426 * PARAMETERS :
6427 * @id : return the stream id if found
6428 *
6429 * RETURN : int32_t type of status
6430 * NO_ERROR -- success
6431 *              non-zero failure code
6432 *==========================================================================*/
6433int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6434{
6435    /* check if there is any output or bidirectional stream with the same size
6436       and format, and return that stream */
6437 if ((mInputStreamInfo.dim.width > 0) &&
6438 (mInputStreamInfo.dim.height > 0)) {
6439 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6440 it != mStreamInfo.end(); it++) {
6441
6442 camera3_stream_t *stream = (*it)->stream;
6443 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6444 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6445 (stream->format == mInputStreamInfo.format)) {
6446 // Usage flag for an input stream and the source output stream
6447 // may be different.
6448 LOGD("Found reprocessible output stream! %p", *it);
6449 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6450 stream->usage, mInputStreamInfo.usage);
6451
6452 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6453 if (channel != NULL && channel->mStreams[0]) {
6454 id = channel->mStreams[0]->getMyServerID();
6455 return NO_ERROR;
6456 }
6457 }
6458 }
6459 } else {
6460 LOGD("No input stream, so no reprocessible output stream");
6461 }
6462 return NAME_NOT_FOUND;
6463}
6464
6465/*===========================================================================
6466 * FUNCTION : lookupFwkName
6467 *
6468 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6469 *              make sure the parameter is correctly propagated
6470 *
6471 * PARAMETERS :
6472 * @arr : map between the two enums
6473 * @len : len of the map
6474 * @hal_name : name of the hal_parm to map
6475 *
6476 * RETURN : int type of status
6477 * fwk_name -- success
6478 *              non-zero failure code
6479 *==========================================================================*/
6480template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6481 size_t len, halType hal_name)
6482{
6483
6484 for (size_t i = 0; i < len; i++) {
6485 if (arr[i].hal_name == hal_name) {
6486 return arr[i].fwk_name;
6487 }
6488 }
6489
6490    /* Not being able to find a matching framework type is not necessarily
6491     * an error case. This happens when mm-camera supports more attributes
6492     * than the framework does */
6493 LOGH("Cannot find matching framework type");
6494 return NAME_NOT_FOUND;
6495}
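// Illustrative usage sketch (the map table and macro names below, e.g.
// EFFECT_MODES_MAP and METADATA_MAP_SIZE, are assumed from this file's
// existing conventions):
//   int fwkEffect = lookupFwkName(EFFECT_MODES_MAP,
//           METADATA_MAP_SIZE(EFFECT_MODES_MAP), CAM_EFFECT_MODE_MONO);
//   if (fwkEffect == NAME_NOT_FOUND) {
//       // backend value has no framework equivalent; skip reporting it
//   }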
6496
6497/*===========================================================================
6498 * FUNCTION : lookupHalName
6499 *
6500 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6501 *              make sure the parameter is correctly propagated
6502 *
6503 * PARAMETERS :
6504 * @arr : map between the two enums
6505 * @len : len of the map
6506 *   @fwk_name  : framework enum value to map to its HAL counterpart
6507 *
6508 * RETURN : int32_t type of status
6509 * hal_name -- success
6510 *              non-zero failure code
6511 *==========================================================================*/
6512template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6513 size_t len, fwkType fwk_name)
6514{
6515 for (size_t i = 0; i < len; i++) {
6516 if (arr[i].fwk_name == fwk_name) {
6517 return arr[i].hal_name;
6518 }
6519 }
6520
6521 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6522 return NAME_NOT_FOUND;
6523}
6524
6525/*===========================================================================
6526 * FUNCTION : lookupProp
6527 *
6528 * DESCRIPTION: lookup a value by its name
6529 *
6530 * PARAMETERS :
6531 * @arr : map between the two enums
6532 * @len : size of the map
6533 * @name : name to be looked up
6534 *
6535 * RETURN : Value if found
6536 * CAM_CDS_MODE_MAX if not found
6537 *==========================================================================*/
6538template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6539 size_t len, const char *name)
6540{
6541 if (name) {
6542 for (size_t i = 0; i < len; i++) {
6543 if (!strcmp(arr[i].desc, name)) {
6544 return arr[i].val;
6545 }
6546 }
6547 }
6548 return CAM_CDS_MODE_MAX;
6549}
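// Illustrative usage sketch (assuming the CDS_MAP property table and the
// persist.camera.CDS property used elsewhere in this HAL):
//   char prop[PROPERTY_VALUE_MAX];
//   property_get("persist.camera.CDS", prop, "Auto");
//   cam_cds_mode_type_t cds = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//   if (cds == CAM_CDS_MODE_MAX) {
//       // unrecognized value; caller falls back to its default
//   }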
6550
6551/*===========================================================================
6552 * FUNCTION   : translateFromHalMetadata
6553 *
6554 * DESCRIPTION: Translate metadata from the HAL/backend format to camera_metadata_t
6555 * PARAMETERS :
6556 * @metadata : metadata information from callback
6557 * @timestamp: metadata buffer timestamp
6558 * @request_id: request id
6559 * @jpegMetadata: additional jpeg metadata
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006560 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006561 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6562 * // DevCamDebug metadata end
6563 *   @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006564 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6565 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006566 *
6567 * RETURN : camera_metadata_t*
6568 * metadata in a format specified by fwk
6569 *==========================================================================*/
6570camera_metadata_t*
6571QCamera3HardwareInterface::translateFromHalMetadata(
6572 metadata_buffer_t *metadata,
6573 nsecs_t timestamp,
6574 int32_t request_id,
6575 const CameraMetadata& jpegMetadata,
6576 uint8_t pipeline_depth,
6577 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006578 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006579 /* DevCamDebug metadata translateFromHalMetadata argument */
6580 uint8_t DevCamDebug_meta_enable,
6581 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006582 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006583 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006584 bool lastMetadataInBatch,
6585 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006586{
6587 CameraMetadata camMetadata;
6588 camera_metadata_t *resultMetadata;
6589
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006590 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006591 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6592 * Timestamp is needed because it's used for shutter notify calculation.
6593 * */
6594 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6595 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006596 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006597 }
6598
Thierry Strudel3d639192016-09-09 11:52:26 -07006599 if (jpegMetadata.entryCount())
6600 camMetadata.append(jpegMetadata);
6601
6602 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6603 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6604 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6605 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006606 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006607 if (mBatchSize == 0) {
6608 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6609 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006611
Samuel Ha68ba5172016-12-15 18:41:12 -08006612 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6613    // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
6614 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6615 // DevCamDebug metadata translateFromHalMetadata AF
6616 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6617 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6618 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6619 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6620 }
6621 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6622 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6623 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6624 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6627 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6628 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6629 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6630 }
6631 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6632 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6633 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6634 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6635 }
6636 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6637 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6638 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6639 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6640 }
6641 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6642 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6643 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6644 *DevCamDebug_af_monitor_pdaf_target_pos;
6645 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6646 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6647 }
6648 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6649 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6650 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6651 *DevCamDebug_af_monitor_pdaf_confidence;
6652 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6653 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6654 }
6655 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6656 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6657 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6658 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6659 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6660 }
6661 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6662 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6663 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6664 *DevCamDebug_af_monitor_tof_target_pos;
6665 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6666 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6667 }
6668 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6669 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6670 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6671 *DevCamDebug_af_monitor_tof_confidence;
6672 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6673 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6674 }
6675 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6676 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6677 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6678 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6679 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6682 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6683 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6684 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6685 &fwk_DevCamDebug_af_monitor_type_select, 1);
6686 }
6687 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6688 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6689 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6690 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6691 &fwk_DevCamDebug_af_monitor_refocus, 1);
6692 }
6693 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6694 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6695 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6696 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6697 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6698 }
6699 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6700 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6701 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6702 *DevCamDebug_af_search_pdaf_target_pos;
6703 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6704 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6707 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6708 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6709 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6710 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6711 }
6712 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6713 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6714 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6715 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6716 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6717 }
6718 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6719 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6720 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6721 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6722 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6723 }
6724 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6725 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6726 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6727 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6728 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6729 }
6730 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6731 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6732 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6733 *DevCamDebug_af_search_tof_target_pos;
6734 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6735 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6736 }
6737 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6738 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6739 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6740 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6741 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6742 }
6743 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6744 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6745 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6746 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6747 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6750 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6751 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6752 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6753 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6754 }
6755 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6756 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6757 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6758 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6759 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6760 }
6761 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6762 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6763 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6764 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6765 &fwk_DevCamDebug_af_search_type_select, 1);
6766 }
6767 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6768 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6769 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6770 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6771 &fwk_DevCamDebug_af_search_next_pos, 1);
6772 }
6773 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6774 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6775 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6776 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6777 &fwk_DevCamDebug_af_search_target_pos, 1);
6778 }
6779 // DevCamDebug metadata translateFromHalMetadata AEC
6780 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6781 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6782 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6783 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6784 }
6785 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6786 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6787 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6788 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6789 }
6790 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6791 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6792 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6793 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6794 }
6795 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6796 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6797 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6798 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6799 }
6800 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6801 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6802 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6803 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6804 }
6805 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6806 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6807 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6808 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6811 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6812 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6813 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6814 }
6815 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6816 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6817 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6818 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6819 }
Samuel Ha34229982017-02-17 13:51:11 -08006820 // DevCamDebug metadata translateFromHalMetadata zzHDR
6821 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6822 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6823 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6824 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6825 }
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6827 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006828 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006829 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6830 }
6831 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6832 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6833 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6834 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6837 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006838 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006839 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6840 }
6841 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6842 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6843 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6844 *DevCamDebug_aec_hdr_sensitivity_ratio;
6845 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6846 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6847 }
6848 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6849 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6850 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6851 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6852 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6853 }
6854 // DevCamDebug metadata translateFromHalMetadata ADRC
6855 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6856 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6857 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6858 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6859 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6860 }
6861 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6862 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6863 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6864 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6865 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6866 }
6867 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6868 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6869 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6870 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6871 }
6872 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6873 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6874 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6875 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6876 }
6877 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6878 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6879 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6880 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6881 }
6882 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6883 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6884 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6885 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6886 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006887 // DevCamDebug metadata translateFromHalMetadata AWB
6888 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6889 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6890 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6891 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6892 }
6893 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6894 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6895 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6896 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6897 }
6898 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6899 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6900 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6901 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6902 }
6903 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6904 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6905 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6906 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6907 }
6908 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6909 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6910 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6911 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6912 }
6913 }
6914 // atrace_end(ATRACE_TAG_ALWAYS);
6915
Thierry Strudel3d639192016-09-09 11:52:26 -07006916 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6917 int64_t fwk_frame_number = *frame_number;
6918 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6919 }
6920
6921 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6922 int32_t fps_range[2];
6923 fps_range[0] = (int32_t)float_range->min_fps;
6924 fps_range[1] = (int32_t)float_range->max_fps;
6925 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6926 fps_range, 2);
6927 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6928 fps_range[0], fps_range[1]);
6929 }
6930
6931 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6932 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6933 }
6934
6935 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6936 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6937 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6938 *sceneMode);
6939 if (NAME_NOT_FOUND != val) {
6940 uint8_t fwkSceneMode = (uint8_t)val;
6941 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6942 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6943 fwkSceneMode);
6944 }
6945 }
6946
6947 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6948 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6949 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6950 }
6951
6952 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6953 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6954 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6955 }
6956
6957 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6958 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6959 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6960 }
6961
6962 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6963 CAM_INTF_META_EDGE_MODE, metadata) {
6964 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6965 }
6966
6967 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6968 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6969 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6970 }
6971
6972 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6973 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6974 }
6975
6976 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6977 if (0 <= *flashState) {
6978 uint8_t fwk_flashState = (uint8_t) *flashState;
6979 if (!gCamCapability[mCameraId]->flash_available) {
6980 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6981 }
6982 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6983 }
6984 }
6985
6986 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6987 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6988 if (NAME_NOT_FOUND != val) {
6989 uint8_t fwk_flashMode = (uint8_t)val;
6990 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6991 }
6992 }
6993
6994 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6995 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6996 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6997 }
6998
6999 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7000 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7001 }
7002
7003 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7004 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7005 }
7006
7007 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7008 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7009 }
7010
7011 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7012 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7013 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7014 }
7015
7016 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7017 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7018 LOGD("fwk_videoStab = %d", fwk_videoStab);
7019 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7020 } else {
7021 // Regardless of whether video stabilization is supported, CTS expects the EIS result to be
7022 // non-NULL, so hardcode the video stabilization result to OFF mode.
7023 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7024 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007025 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007026 }
7027
7028 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7029 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7030 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7031 }
7032
7033 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7034 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7035 }
7036
Thierry Strudel3d639192016-09-09 11:52:26 -07007037 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7038 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007039 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007040
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007041 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7042 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007043
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007044 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007045 blackLevelAppliedPattern->cam_black_level[0],
7046 blackLevelAppliedPattern->cam_black_level[1],
7047 blackLevelAppliedPattern->cam_black_level[2],
7048 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007049 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7050 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007051
7052#ifndef USE_HAL_3_3
7053 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307054 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007055 // depth space.
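        // (Dividing by 16 maps the 14-bit range [0..16383] onto the 10-bit range [0..1023];
        // for example, an applied black level of 1024 becomes 64.)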
Jason Lee4f3d96e2017-02-28 19:24:14 +05307056 fwk_blackLevelInd[0] /= 16.0;
7057 fwk_blackLevelInd[1] /= 16.0;
7058 fwk_blackLevelInd[2] /= 16.0;
7059 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007060 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7061 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007062#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007063 }
7064
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007065#ifndef USE_HAL_3_3
7066 // Fixed whitelevel is used by ISP/Sensor
7067 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7068 &gCamCapability[mCameraId]->white_level, 1);
7069#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007070
7071 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7072 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7073 int32_t scalerCropRegion[4];
7074 scalerCropRegion[0] = hScalerCropRegion->left;
7075 scalerCropRegion[1] = hScalerCropRegion->top;
7076 scalerCropRegion[2] = hScalerCropRegion->width;
7077 scalerCropRegion[3] = hScalerCropRegion->height;
7078
7079 // Adjust crop region from sensor output coordinate system to active
7080 // array coordinate system.
7081 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7082 scalerCropRegion[2], scalerCropRegion[3]);
7083
7084 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7085 }
7086
7087 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7088 LOGD("sensorExpTime = %lld", *sensorExpTime);
7089 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7090 }
7091
7092     IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7093             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7094         LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7095         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7096 }
7097
7098 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7099 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7100 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7101 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7102 sensorRollingShutterSkew, 1);
7103 }
7104
7105 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7106 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7107 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7108
7109 //calculate the noise profile based on sensitivity
7110 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7111 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7112 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
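        // ANDROID_SENSOR_NOISE_PROFILE expects interleaved (S, O) pairs, one pair per color
        // channel; the same sensitivity-derived pair is reused for every channel here. In the
        // framework's noise model these coefficients roughly describe pixel variance as
        // S * signal + O.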
7113 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7114 noise_profile[i] = noise_profile_S;
7115 noise_profile[i+1] = noise_profile_O;
7116 }
7117 LOGD("noise model entry (S, O) is (%f, %f)",
7118 noise_profile_S, noise_profile_O);
7119 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7120 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7121 }
7122
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007123#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007124 int32_t fwk_ispSensitivity = 100;
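    // ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST uses ISO-like units where 100 means unity
    // (no boost); when both values are reported, the ISP sensitivity is scaled by the
    // post-stats digital gain before being published.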
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007125 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007126 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007127 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007128 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7129 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7130 }
7131 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007132#endif
7133
Thierry Strudel3d639192016-09-09 11:52:26 -07007134 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7135 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7136 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7137 }
7138
7139 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7140 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7141 *faceDetectMode);
7142 if (NAME_NOT_FOUND != val) {
7143 uint8_t fwk_faceDetectMode = (uint8_t)val;
7144 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7145
7146 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7147 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7148 CAM_INTF_META_FACE_DETECTION, metadata) {
7149 uint8_t numFaces = MIN(
7150 faceDetectionInfo->num_faces_detected, MAX_ROI);
7151 int32_t faceIds[MAX_ROI];
7152 uint8_t faceScores[MAX_ROI];
7153 int32_t faceRectangles[MAX_ROI * 4];
7154 int32_t faceLandmarks[MAX_ROI * 6];
7155 size_t j = 0, k = 0;
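                    // j indexes faceRectangles (4 ints per face); k indexes faceLandmarks
                    // (TOTAL_LANDMARK_INDICES ints per face).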
7156
7157 for (size_t i = 0; i < numFaces; i++) {
7158 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7159 // Adjust crop region from sensor output coordinate system to active
7160 // array coordinate system.
7161 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7162 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7163 rect.width, rect.height);
7164
7165 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7166 faceRectangles+j, -1);
7167
Jason Lee8ce36fa2017-04-19 19:40:37 -07007168 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7169 "bottom-right (%d, %d)",
7170 faceDetectionInfo->frame_id, i,
7171 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7172 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7173
Thierry Strudel3d639192016-09-09 11:52:26 -07007174 j+= 4;
7175 }
7176 if (numFaces <= 0) {
7177 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7178 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7179 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7180 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7181 }
7182
7183 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7184 numFaces);
7185 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7186 faceRectangles, numFaces * 4U);
7187 if (fwk_faceDetectMode ==
7188 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7189 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7190 CAM_INTF_META_FACE_LANDMARK, metadata) {
7191
7192 for (size_t i = 0; i < numFaces; i++) {
7193 // Map the co-ordinate sensor output coordinate system to active
7194 // array coordinate system.
7195 mCropRegionMapper.toActiveArray(
7196 landmarks->face_landmarks[i].left_eye_center.x,
7197 landmarks->face_landmarks[i].left_eye_center.y);
7198 mCropRegionMapper.toActiveArray(
7199 landmarks->face_landmarks[i].right_eye_center.x,
7200 landmarks->face_landmarks[i].right_eye_center.y);
7201 mCropRegionMapper.toActiveArray(
7202 landmarks->face_landmarks[i].mouth_center.x,
7203 landmarks->face_landmarks[i].mouth_center.y);
7204
7205 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007206
7207 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7208 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7209 faceDetectionInfo->frame_id, i,
7210 faceLandmarks[k + LEFT_EYE_X],
7211 faceLandmarks[k + LEFT_EYE_Y],
7212 faceLandmarks[k + RIGHT_EYE_X],
7213 faceLandmarks[k + RIGHT_EYE_Y],
7214 faceLandmarks[k + MOUTH_X],
7215 faceLandmarks[k + MOUTH_Y]);
7216
Thierry Strudel04e026f2016-10-10 11:27:36 -07007217 k+= TOTAL_LANDMARK_INDICES;
7218 }
7219 } else {
7220 for (size_t i = 0; i < numFaces; i++) {
7221 setInvalidLandmarks(faceLandmarks+k);
7222 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007223 }
7224 }
7225
Jason Lee49619db2017-04-13 12:07:22 -07007226 for (size_t i = 0; i < numFaces; i++) {
7227 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7228
7229 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7230 faceDetectionInfo->frame_id, i, faceIds[i]);
7231 }
7232
Thierry Strudel3d639192016-09-09 11:52:26 -07007233 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7234 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7235 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007236 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007237 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7238 CAM_INTF_META_FACE_BLINK, metadata) {
7239 uint8_t detected[MAX_ROI];
7240 uint8_t degree[MAX_ROI * 2];
7241 for (size_t i = 0; i < numFaces; i++) {
7242 detected[i] = blinks->blink[i].blink_detected;
7243 degree[2 * i] = blinks->blink[i].left_blink;
7244 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007245
Jason Lee49619db2017-04-13 12:07:22 -07007246 LOGL("FD_DEBUG BLINK : Frame[%d] : Face[%d] : "
7247 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7248 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7249 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007250 }
7251 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7252 detected, numFaces);
7253 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7254 degree, numFaces * 2);
7255 }
7256 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7257 CAM_INTF_META_FACE_SMILE, metadata) {
7258 uint8_t degree[MAX_ROI];
7259 uint8_t confidence[MAX_ROI];
7260 for (size_t i = 0; i < numFaces; i++) {
7261 degree[i] = smiles->smile[i].smile_degree;
7262 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007263
Jason Lee49619db2017-04-13 12:07:22 -07007264 LOGL("FD_DEBUG SMILE : Frame[%d] : Face[%d] : "
7265 "smile_degree=%d, smile_score=%d",
7266 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007267 }
7268 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7269 degree, numFaces);
7270 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7271 confidence, numFaces);
7272 }
7273 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7274 CAM_INTF_META_FACE_GAZE, metadata) {
7275 int8_t angle[MAX_ROI];
7276 int32_t direction[MAX_ROI * 3];
7277 int8_t degree[MAX_ROI * 2];
7278 for (size_t i = 0; i < numFaces; i++) {
7279 angle[i] = gazes->gaze[i].gaze_angle;
7280 direction[3 * i] = gazes->gaze[i].updown_dir;
7281 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7282 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7283 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7284 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007285
7286 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7287 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7288 "left_right_gaze=%d, top_bottom_gaze=%d",
7289 faceDetectionInfo->frame_id, i, angle[i],
7290 direction[3 * i], direction[3 * i + 1],
7291 direction[3 * i + 2],
7292 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007293 }
7294 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7295 (uint8_t *)angle, numFaces);
7296 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7297 direction, numFaces * 3);
7298 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7299 (uint8_t *)degree, numFaces * 2);
7300 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007301 }
7302 }
7303 }
7304 }
7305
7306 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7307 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007308 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007309 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007310 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007311
Shuzhen Wang14415f52016-11-16 18:26:18 -08007312 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7313 histogramBins = *histBins;
7314 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7315 }
7316
7317 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007318 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7319 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007320 int32_t* histogramData = NULL;
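                // For Bayer stats, select the histogram buffer matching the reported channel
                // (falling back to the R-channel buffer); YUV stats carry a single buffer.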
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007321
7322 switch (stats_data->type) {
7323 case CAM_HISTOGRAM_TYPE_BAYER:
7324 switch (stats_data->bayer_stats.data_type) {
7325 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007326 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7327 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007328 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007329 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7330 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007331 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007332 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7333 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007334 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007335 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007336 case CAM_STATS_CHANNEL_R:
7337 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007338 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7339 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007340 }
7341 break;
7342 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007343 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007344 break;
7345 }
7346
Shuzhen Wang14415f52016-11-16 18:26:18 -08007347 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007348 }
7349 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007350 }
7351
7352 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7353 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7354 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7355 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7356 }
7357
7358 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7359 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7360 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7361 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7362 }
7363
7364 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7365 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7366 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7367 CAM_MAX_SHADING_MAP_HEIGHT);
7368 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7369 CAM_MAX_SHADING_MAP_WIDTH);
7370 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7371 lensShadingMap->lens_shading, 4U * map_width * map_height);
7372 }
7373
7374 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7375 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7376 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7377 }
7378
7379 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7380 //Populate CAM_INTF_META_TONEMAP_CURVES
7381 /* ch0 = G, ch 1 = B, ch 2 = R*/
7382 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7383 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7384 tonemap->tonemap_points_cnt,
7385 CAM_MAX_TONEMAP_CURVE_SIZE);
7386 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7387 }
7388
7389 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7390 &tonemap->curves[0].tonemap_points[0][0],
7391 tonemap->tonemap_points_cnt * 2);
7392
7393 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7394 &tonemap->curves[1].tonemap_points[0][0],
7395 tonemap->tonemap_points_cnt * 2);
7396
7397 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7398 &tonemap->curves[2].tonemap_points[0][0],
7399 tonemap->tonemap_points_cnt * 2);
7400 }
7401
7402 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7403 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7404 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7405 CC_GAIN_MAX);
7406 }
7407
7408 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7409 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7410 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7411 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7412 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7413 }
7414
7415 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7416 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7417 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7418 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7419 toneCurve->tonemap_points_cnt,
7420 CAM_MAX_TONEMAP_CURVE_SIZE);
7421 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7422 }
7423 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7424 (float*)toneCurve->curve.tonemap_points,
7425 toneCurve->tonemap_points_cnt * 2);
7426 }
7427
7428 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7429 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7430 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7431 predColorCorrectionGains->gains, 4);
7432 }
7433
7434 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7435 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7436 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7437 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7438 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7439 }
7440
7441 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7442 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7443 }
7444
7445 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7446 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7447 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7448 }
7449
7450 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7451 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7452 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7453 }
7454
7455 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7456 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7457 *effectMode);
7458 if (NAME_NOT_FOUND != val) {
7459 uint8_t fwk_effectMode = (uint8_t)val;
7460 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7461 }
7462 }
7463
7464 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7465 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7466 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7467 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7468 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7469 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7470 }
7471 int32_t fwk_testPatternData[4];
7472 fwk_testPatternData[0] = testPatternData->r;
7473 fwk_testPatternData[3] = testPatternData->b;
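            // Slots [1] and [2] hold the two green samples; swap Gr/Gb based on the sensor's
            // CFA arrangement so each green value lands in the slot the framework expects.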
7474 switch (gCamCapability[mCameraId]->color_arrangement) {
7475 case CAM_FILTER_ARRANGEMENT_RGGB:
7476 case CAM_FILTER_ARRANGEMENT_GRBG:
7477 fwk_testPatternData[1] = testPatternData->gr;
7478 fwk_testPatternData[2] = testPatternData->gb;
7479 break;
7480 case CAM_FILTER_ARRANGEMENT_GBRG:
7481 case CAM_FILTER_ARRANGEMENT_BGGR:
7482 fwk_testPatternData[2] = testPatternData->gr;
7483 fwk_testPatternData[1] = testPatternData->gb;
7484 break;
7485 default:
7486 LOGE("color arrangement %d is not supported",
7487 gCamCapability[mCameraId]->color_arrangement);
7488 break;
7489 }
7490 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7491 }
7492
7493 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7494 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7495 }
7496
7497 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7498 String8 str((const char *)gps_methods);
7499 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7500 }
7501
7502 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7503 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7504 }
7505
7506 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7507 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7508 }
7509
7510 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7511 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7512 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7513 }
7514
7515 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7516 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7517 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7518 }
7519
7520 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7521 int32_t fwk_thumb_size[2];
7522 fwk_thumb_size[0] = thumb_size->width;
7523 fwk_thumb_size[1] = thumb_size->height;
7524 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7525 }
7526
7527 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7528 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7529 privateData,
7530 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7531 }
7532
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007533 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007534 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007535 meteringMode, 1);
7536 }
7537
Thierry Strudel54dc9782017-02-15 12:12:10 -08007538 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7539 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7540 LOGD("hdr_scene_data: %d %f\n",
7541 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7542 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7543 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7544 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7545 &isHdr, 1);
7546 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7547 &isHdrConfidence, 1);
7548 }
7549
7550
7551
Thierry Strudel3d639192016-09-09 11:52:26 -07007552 if (metadata->is_tuning_params_valid) {
7553 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7554 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7555 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7556
7557
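        // Serialized blob layout: six uint32_t header fields (data version, then sensor, VFE,
        // CPP, CAC and mod3 data sizes) followed by the sensor, VFE, CPP and CAC payloads,
        // each clamped to its respective maximum size.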
7558 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7559 sizeof(uint32_t));
7560 data += sizeof(uint32_t);
7561
7562 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7563 sizeof(uint32_t));
7564 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7565 data += sizeof(uint32_t);
7566
7567 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7568 sizeof(uint32_t));
7569 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7570 data += sizeof(uint32_t);
7571
7572 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7573 sizeof(uint32_t));
7574 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7575 data += sizeof(uint32_t);
7576
7577 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7578 sizeof(uint32_t));
7579 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7580 data += sizeof(uint32_t);
7581
7582 metadata->tuning_params.tuning_mod3_data_size = 0;
7583 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7584 sizeof(uint32_t));
7585 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7586 data += sizeof(uint32_t);
7587
7588 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7589 TUNING_SENSOR_DATA_MAX);
7590 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7591 count);
7592 data += count;
7593
7594 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7595 TUNING_VFE_DATA_MAX);
7596 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7597 count);
7598 data += count;
7599
7600 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7601 TUNING_CPP_DATA_MAX);
7602 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7603 count);
7604 data += count;
7605
7606 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7607 TUNING_CAC_DATA_MAX);
7608 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7609 count);
7610 data += count;
7611
7612 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7613 (int32_t *)(void *)tuning_meta_data_blob,
7614 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7615 }
7616
7617 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7618 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7619 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7620 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7621 NEUTRAL_COL_POINTS);
7622 }
7623
7624 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7625 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7626 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7627 }
7628
7629 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7630 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7631 // Adjust crop region from sensor output coordinate system to active
7632 // array coordinate system.
7633 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7634 hAeRegions->rect.width, hAeRegions->rect.height);
7635
7636 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7637 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7638 REGIONS_TUPLE_COUNT);
7639 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7640 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7641 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7642 hAeRegions->rect.height);
7643 }
7644
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007645 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7646 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7647 if (NAME_NOT_FOUND != val) {
7648 uint8_t fwkAfMode = (uint8_t)val;
7649 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7650 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7651 } else {
7652 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7653 val);
7654 }
7655 }
7656
Thierry Strudel3d639192016-09-09 11:52:26 -07007657 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7658 uint8_t fwk_afState = (uint8_t) *afState;
7659 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007660 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007661 }
7662
7663 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7664 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7665 }
7666
7667 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7668 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7669 }
7670
7671 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7672 uint8_t fwk_lensState = *lensState;
7673 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7674 }
7675
Thierry Strudel3d639192016-09-09 11:52:26 -07007676
7677 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007678 uint32_t ab_mode = *hal_ab_mode;
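        // The framework antibanding enum only defines OFF/50HZ/60HZ/AUTO, so both HAL auto
        // variants are reported as plain AUTO.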
7679 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7680 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7681 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7682 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007683 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007684 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007685 if (NAME_NOT_FOUND != val) {
7686 uint8_t fwk_ab_mode = (uint8_t)val;
7687 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7688 }
7689 }
7690
7691 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7692 int val = lookupFwkName(SCENE_MODES_MAP,
7693 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7694 if (NAME_NOT_FOUND != val) {
7695 uint8_t fwkBestshotMode = (uint8_t)val;
7696 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7697 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7698 } else {
7699 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7700 }
7701 }
7702
7703 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7704 uint8_t fwk_mode = (uint8_t) *mode;
7705 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7706 }
7707
7708 /* Constant metadata values to be updated */
7709 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7710 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7711
7712 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7713 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7714
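    // The hot pixel map mode is reported as OFF above, so publish an empty coordinate list
    // (count 0).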
7715 int32_t hotPixelMap[2];
7716 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7717
7718 // CDS
7719 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7720 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7721 }
7722
Thierry Strudel04e026f2016-10-10 11:27:36 -07007723 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7724 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007725 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007726 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7727 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7728 } else {
7729 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7730 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007731
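        // Track HDR transitions in mCurrFeatureState so a toggle is logged only when the
        // reported mode actually changes.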
7732 if(fwk_hdr != curr_hdr_state) {
7733 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7734 if(fwk_hdr)
7735 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7736 else
7737 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7738 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007739 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7740 }
7741
Thierry Strudel54dc9782017-02-15 12:12:10 -08007742 //binning correction
7743 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7744 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7745 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7746 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7747 }
7748
Thierry Strudel04e026f2016-10-10 11:27:36 -07007749 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007750 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007751 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7752 int8_t is_ir_on = 0;
7753
7754 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7755 if(is_ir_on != curr_ir_state) {
7756 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7757 if(is_ir_on)
7758 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7759 else
7760 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7761 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007762 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007763 }
7764
Thierry Strudel269c81a2016-10-12 12:13:59 -07007765 // AEC SPEED
7766 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7767 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7768 }
7769
7770 // AWB SPEED
7771 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7772 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7773 }
7774
Thierry Strudel3d639192016-09-09 11:52:26 -07007775 // TNR
7776 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7777 uint8_t tnr_enable = tnr->denoise_enable;
7778 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007779 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7780 int8_t is_tnr_on = 0;
7781
7782 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
7783 if(is_tnr_on != curr_tnr_state) {
7784 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7785 if(is_tnr_on)
7786 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7787 else
7788 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7789 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007790
7791 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7792 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7793 }
7794
7795 // Reprocess crop data
7796 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7797 uint8_t cnt = crop_data->num_of_streams;
7798 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7799 // mm-qcamera-daemon only posts crop_data for streams
7800 // not linked to pproc, so the absence of valid crop metadata is not
7801 // necessarily an error case.
7802 LOGD("No valid crop metadata entries");
7803 } else {
7804 uint32_t reproc_stream_id;
7805 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7806 LOGD("No reprocessible stream found, ignore crop data");
7807 } else {
7808 int rc = NO_ERROR;
7809 Vector<int32_t> roi_map;
7810 int32_t *crop = new int32_t[cnt*4];
7811 if (NULL == crop) {
7812 rc = NO_MEMORY;
7813 }
7814 if (NO_ERROR == rc) {
7815 int32_t streams_found = 0;
7816 for (size_t i = 0; i < cnt; i++) {
7817 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7818 if (pprocDone) {
7819 // HAL already does internal reprocessing,
7820 // either via reprocessing before JPEG encoding,
7821 // or offline postprocessing for pproc bypass case.
7822 crop[0] = 0;
7823 crop[1] = 0;
7824 crop[2] = mInputStreamInfo.dim.width;
7825 crop[3] = mInputStreamInfo.dim.height;
7826 } else {
7827 crop[0] = crop_data->crop_info[i].crop.left;
7828 crop[1] = crop_data->crop_info[i].crop.top;
7829 crop[2] = crop_data->crop_info[i].crop.width;
7830 crop[3] = crop_data->crop_info[i].crop.height;
7831 }
7832 roi_map.add(crop_data->crop_info[i].roi_map.left);
7833 roi_map.add(crop_data->crop_info[i].roi_map.top);
7834 roi_map.add(crop_data->crop_info[i].roi_map.width);
7835 roi_map.add(crop_data->crop_info[i].roi_map.height);
7836 streams_found++;
7837 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7838 crop[0], crop[1], crop[2], crop[3]);
7839 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7840 crop_data->crop_info[i].roi_map.left,
7841 crop_data->crop_info[i].roi_map.top,
7842 crop_data->crop_info[i].roi_map.width,
7843 crop_data->crop_info[i].roi_map.height);
7844 break;
7845
7846 }
7847 }
7848 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7849 &streams_found, 1);
7850 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7851 crop, (size_t)(streams_found * 4));
7852 if (roi_map.array()) {
7853 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7854 roi_map.array(), roi_map.size());
7855 }
7856 }
7857 if (crop) {
7858 delete [] crop;
7859 }
7860 }
7861 }
7862 }
7863
7864 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7865 // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
7866 // so hardcode the CAC result to OFF mode.
7867 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7868 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7869 } else {
7870 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7871 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7872 *cacMode);
7873 if (NAME_NOT_FOUND != val) {
7874 uint8_t resultCacMode = (uint8_t)val;
7875 // check whether CAC result from CB is equal to Framework set CAC mode
7876 // If they are not equal, report the CAC mode that came in the corresponding request
7877 if (fwk_cacMode != resultCacMode) {
7878 resultCacMode = fwk_cacMode;
7879 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007880 //Check if CAC is disabled by property
7881 if (m_cacModeDisabled) {
7882 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7883 }
7884
Thierry Strudel3d639192016-09-09 11:52:26 -07007885 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7886 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7887 } else {
7888 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7889 }
7890 }
7891 }
7892
7893 // Post blob of cam_cds_data through vendor tag.
7894 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7895 uint8_t cnt = cdsInfo->num_of_streams;
7896 cam_cds_data_t cdsDataOverride;
7897 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7898 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7899 cdsDataOverride.num_of_streams = 1;
7900 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7901 uint32_t reproc_stream_id;
7902 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7903 LOGD("No reprocessible stream found, ignore cds data");
7904 } else {
7905 for (size_t i = 0; i < cnt; i++) {
7906 if (cdsInfo->cds_info[i].stream_id ==
7907 reproc_stream_id) {
7908 cdsDataOverride.cds_info[0].cds_enable =
7909 cdsInfo->cds_info[i].cds_enable;
7910 break;
7911 }
7912 }
7913 }
7914 } else {
7915 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7916 }
7917 camMetadata.update(QCAMERA3_CDS_INFO,
7918 (uint8_t *)&cdsDataOverride,
7919 sizeof(cam_cds_data_t));
7920 }
7921
7922 // Ldaf calibration data
7923 if (!mLdafCalibExist) {
7924 IF_META_AVAILABLE(uint32_t, ldafCalib,
7925 CAM_INTF_META_LDAF_EXIF, metadata) {
7926 mLdafCalibExist = true;
7927 mLdafCalib[0] = ldafCalib[0];
7928 mLdafCalib[1] = ldafCalib[1];
7929 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7930 ldafCalib[0], ldafCalib[1]);
7931 }
7932 }
7933
Thierry Strudel54dc9782017-02-15 12:12:10 -08007934 // EXIF debug data through vendor tag
7935 /*
7936 * Mobicat Mask can assume 3 values:
7937 * 1 refers to Mobicat data,
7938 * 2 refers to Stats Debug and Exif Debug Data
7939 * 3 refers to Mobicat and Stats Debug Data
7940 * We want to make sure that we are sending Exif debug data
7941 * only when Mobicat Mask is 2.
7942 */
7943 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7944 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7945 (uint8_t *)(void *)mExifParams.debug_params,
7946 sizeof(mm_jpeg_debug_exif_params_t));
7947 }
7948
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007949 // Reprocess and DDM debug data through vendor tag
7950 cam_reprocess_info_t repro_info;
7951 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007952 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7953 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007954 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007955 }
7956 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7957 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007958 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007959 }
7960 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7961 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007962 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007963 }
7964 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7965 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007966 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007967 }
7968 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7969 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007970 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007971 }
7972 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007973 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007974 }
7975 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7976 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007977 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007978 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007979 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7980 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7981 }
7982 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7983 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7984 }
7985 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7986 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007987
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007988 // INSTANT AEC MODE
7989 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7990 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7991 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7992 }
7993
Shuzhen Wange763e802016-03-31 10:24:29 -07007994 // AF scene change
7995 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7996 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7997 }
7998
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007999 // Enable ZSL
8000 if (enableZsl != nullptr) {
8001 uint8_t value = *enableZsl ?
8002 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8003 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8004 }
8005
Thierry Strudel3d639192016-09-09 11:52:26 -07008006 resultMetadata = camMetadata.release();
8007 return resultMetadata;
8008}
8009
8010/*===========================================================================
8011 * FUNCTION : saveExifParams
8012 *
8013 * DESCRIPTION:
8014 *
8015 * PARAMETERS :
8016 * @metadata : metadata information from callback
8017 *
8018 * RETURN : none
8019 *
8020 *==========================================================================*/
8021void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8022{
8023 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8024 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8025 if (mExifParams.debug_params) {
8026 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8027 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8028 }
8029 }
8030 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8031 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8032 if (mExifParams.debug_params) {
8033 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8034 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8035 }
8036 }
8037 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8038 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8039 if (mExifParams.debug_params) {
8040 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8041 mExifParams.debug_params->af_debug_params_valid = TRUE;
8042 }
8043 }
8044 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8045 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8046 if (mExifParams.debug_params) {
8047 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8048 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8049 }
8050 }
8051 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8052 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8053 if (mExifParams.debug_params) {
8054 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8055 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8056 }
8057 }
8058 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8059 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8060 if (mExifParams.debug_params) {
8061 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8062 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8063 }
8064 }
8065 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8066 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8067 if (mExifParams.debug_params) {
8068 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8069 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8070 }
8071 }
8072 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8073 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8074 if (mExifParams.debug_params) {
8075 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8076 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8077 }
8078 }
8079}
8080
8081/*===========================================================================
8082 * FUNCTION : get3AExifParams
8083 *
8084 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8085 *
8086 * PARAMETERS : none
8087 *
8088 *
8089 * RETURN : mm_jpeg_exif_params_t
8090 *
8091 *==========================================================================*/
8092mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8093{
8094 return mExifParams;
8095}
8096
8097/*===========================================================================
8098 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8099 *
8100 * DESCRIPTION:
8101 *
8102 * PARAMETERS :
8103 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008104 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8105 * urgent metadata in a batch. Always true for
8106 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008107 *
8108 * RETURN : camera_metadata_t*
8109 * metadata in a format specified by fwk
8110 *==========================================================================*/
8111camera_metadata_t*
8112QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008113 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008114{
8115 CameraMetadata camMetadata;
8116 camera_metadata_t *resultMetadata;
8117
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008118 if (!lastUrgentMetadataInBatch) {
8119 /* In batch mode, use empty metadata if this is not the last in batch
8120 */
8121 resultMetadata = allocate_camera_metadata(0, 0);
8122 return resultMetadata;
8123 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008124
8125 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8126 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8127 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8128 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8129 }
8130
8131 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8132 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8133 &aecTrigger->trigger, 1);
8134 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8135 &aecTrigger->trigger_id, 1);
8136 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8137 aecTrigger->trigger);
8138 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8139 aecTrigger->trigger_id);
8140 }
8141
8142 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8143 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8144 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8145 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8146 }
8147
Thierry Strudel3d639192016-09-09 11:52:26 -07008148 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8149 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8150 &af_trigger->trigger, 1);
8151 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8152 af_trigger->trigger);
8153 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8154 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8155 af_trigger->trigger_id);
8156 }
8157
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008158 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8159 /*af regions*/
8160 int32_t afRegions[REGIONS_TUPLE_COUNT];
8161 // Map the AF region from the sensor output coordinate system to the
8162 // active array coordinate system.
8163 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8164 hAfRegions->rect.width, hAfRegions->rect.height);
8165
8166 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8167 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8168 REGIONS_TUPLE_COUNT);
8169 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8170 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8171 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8172 hAfRegions->rect.height);
8173 }
8174
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008175 // AF region confidence
8176 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8177 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8178 }
8179
Thierry Strudel3d639192016-09-09 11:52:26 -07008180 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8181 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8182 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8183 if (NAME_NOT_FOUND != val) {
8184 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8185 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8186 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8187 } else {
8188 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8189 }
8190 }
8191
8192 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8193 uint32_t aeMode = CAM_AE_MODE_MAX;
8194 int32_t flashMode = CAM_FLASH_MODE_MAX;
8195 int32_t redeye = -1;
8196 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8197 aeMode = *pAeMode;
8198 }
8199 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8200 flashMode = *pFlashMode;
8201 }
8202 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8203 redeye = *pRedeye;
8204 }
8205
8206 if (1 == redeye) {
8207 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8208 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8209 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8210 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8211 flashMode);
8212 if (NAME_NOT_FOUND != val) {
8213 fwk_aeMode = (uint8_t)val;
8214 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8215 } else {
8216 LOGE("Unsupported flash mode %d", flashMode);
8217 }
8218 } else if (aeMode == CAM_AE_MODE_ON) {
8219 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8220 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8221 } else if (aeMode == CAM_AE_MODE_OFF) {
8222 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8223 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008224 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8225 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8226 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008227 } else {
8228 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8229 "flashMode:%d, aeMode:%u!!!",
8230 redeye, flashMode, aeMode);
8231 }
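        // Summary of the AE mode deduction above (a reading aid, not new behavior):
        // redeye reduction wins first, then an explicit AUTO/ON flash mode (mapped via
        // AE_FLASH_MODE_MAP), then plain CAM_AE_MODE_ON / CAM_AE_MODE_OFF, then the
        // external-flash vendor mode; anything else is logged as an error and no
        // ANDROID_CONTROL_AE_MODE entry is reported for this frame.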
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008232 if (mInstantAEC) {
8233 // Increment the frame index count until a bound is reached for instant AEC.
8234 mInstantAecFrameIdxCount++;
8235 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8236 CAM_INTF_META_AEC_INFO, metadata) {
8237 LOGH("ae_params->settled = %d",ae_params->settled);
8238 // If AEC settled, or if number of frames reached bound value,
8239 // should reset instant AEC.
8240 if (ae_params->settled ||
8241 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8242 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8243 mInstantAEC = false;
8244 mResetInstantAEC = true;
8245 mInstantAecFrameIdxCount = 0;
8246 }
8247 }
8248 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008249 resultMetadata = camMetadata.release();
8250 return resultMetadata;
8251}
8252
8253/*===========================================================================
8254 * FUNCTION : dumpMetadataToFile
8255 *
8256 * DESCRIPTION: Dumps tuning metadata to file system
8257 *
8258 * PARAMETERS :
8259 * @meta : tuning metadata
8260 * @dumpFrameCount : current dump frame count
8261 * @enabled : Enable mask
 * @type : metadata type string used in the dump file name
 * @frameNumber : current frame number
8262 *
8263 *==========================================================================*/
8264void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8265 uint32_t &dumpFrameCount,
8266 bool enabled,
8267 const char *type,
8268 uint32_t frameNumber)
8269{
8270 //Some sanity checks
8271 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8272 LOGE("Tuning sensor data size bigger than expected %d: %d",
8273 meta.tuning_sensor_data_size,
8274 TUNING_SENSOR_DATA_MAX);
8275 return;
8276 }
8277
8278 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8279 LOGE("Tuning VFE data size bigger than expected %d: %d",
8280 meta.tuning_vfe_data_size,
8281 TUNING_VFE_DATA_MAX);
8282 return;
8283 }
8284
8285 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8286 LOGE("Tuning CPP data size bigger than expected %d: %d",
8287 meta.tuning_cpp_data_size,
8288 TUNING_CPP_DATA_MAX);
8289 return;
8290 }
8291
8292 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8293 LOGE("Tuning CAC data size bigger than expected %d: %d",
8294 meta.tuning_cac_data_size,
8295 TUNING_CAC_DATA_MAX);
8296 return;
8297 }
8298 //
8299
8300 if(enabled){
8301 char timeBuf[FILENAME_MAX];
8302 char buf[FILENAME_MAX];
8303 memset(buf, 0, sizeof(buf));
8304 memset(timeBuf, 0, sizeof(timeBuf));
8305 time_t current_time;
8306 struct tm * timeinfo;
8307 time (&current_time);
8308 timeinfo = localtime (&current_time);
8309 if (timeinfo != NULL) {
8310 strftime (timeBuf, sizeof(timeBuf),
8311 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8312 }
8313 String8 filePath(timeBuf);
8314 snprintf(buf,
8315 sizeof(buf),
8316 "%dm_%s_%d.bin",
8317 dumpFrameCount,
8318 type,
8319 frameNumber);
8320 filePath.append(buf);
8321 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8322 if (file_fd >= 0) {
8323 ssize_t written_len = 0;
8324 meta.tuning_data_version = TUNING_DATA_VERSION;
8325 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8326 written_len += write(file_fd, data, sizeof(uint32_t));
8327 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8328 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8329 written_len += write(file_fd, data, sizeof(uint32_t));
8330 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8331 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8332 written_len += write(file_fd, data, sizeof(uint32_t));
8333 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8334 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8335 written_len += write(file_fd, data, sizeof(uint32_t));
8336 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8337 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8338 written_len += write(file_fd, data, sizeof(uint32_t));
8339 meta.tuning_mod3_data_size = 0;
8340 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8341 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8342 written_len += write(file_fd, data, sizeof(uint32_t));
8343 size_t total_size = meta.tuning_sensor_data_size;
8344 data = (void *)((uint8_t *)&meta.data);
8345 written_len += write(file_fd, data, total_size);
8346 total_size = meta.tuning_vfe_data_size;
8347 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8348 written_len += write(file_fd, data, total_size);
8349 total_size = meta.tuning_cpp_data_size;
8350 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8351 written_len += write(file_fd, data, total_size);
8352 total_size = meta.tuning_cac_data_size;
8353 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8354 written_len += write(file_fd, data, total_size);
8355 close(file_fd);
8356 }else {
8357 LOGE("fail to open file for metadata dumping");
8358 }
8359 }
8360}
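// Layout of the dump file written above, as implied by the write() sequence
// (all header fields are 32-bit, in this order):
//   tuning_data_version, tuning_sensor_data_size, tuning_vfe_data_size,
//   tuning_cpp_data_size, tuning_cac_data_size, tuning_mod3_data_size,
// followed by the sensor, VFE, CPP and CAC payloads copied from meta.data at
// offsets 0, TUNING_VFE_DATA_OFFSET, TUNING_CPP_DATA_OFFSET and
// TUNING_CAC_DATA_OFFSET respectively.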
8361
8362/*===========================================================================
8363 * FUNCTION : cleanAndSortStreamInfo
8364 *
8365 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8366 * and sort them such that the raw stream is at the end of the list.
8367 * This is a workaround for a camera daemon constraint.
8368 *
8369 * PARAMETERS : None
8370 *
8371 *==========================================================================*/
8372void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8373{
8374 List<stream_info_t *> newStreamInfo;
8375
8376 /*clean up invalid streams*/
8377 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8378 it != mStreamInfo.end();) {
8379 if(((*it)->status) == INVALID){
8380 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8381 delete channel;
8382 free(*it);
8383 it = mStreamInfo.erase(it);
8384 } else {
8385 it++;
8386 }
8387 }
8388
8389 // Move preview/video/callback/snapshot streams into newList
8390 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8391 it != mStreamInfo.end();) {
8392 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8393 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8394 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8395 newStreamInfo.push_back(*it);
8396 it = mStreamInfo.erase(it);
8397 } else
8398 it++;
8399 }
8400 // Move raw streams into newList
8401 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8402 it != mStreamInfo.end();) {
8403 newStreamInfo.push_back(*it);
8404 it = mStreamInfo.erase(it);
8405 }
8406
8407 mStreamInfo = newStreamInfo;
8408}
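// Example of the resulting order (hypothetical configuration): given streams
// {RAW16, IMPLEMENTATION_DEFINED preview, BLOB snapshot}, mStreamInfo ends up
// as {IMPLEMENTATION_DEFINED, BLOB, RAW16}, i.e. processed streams first and
// raw streams moved to the tail, matching the daemon constraint noted above.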
8409
8410/*===========================================================================
8411 * FUNCTION : extractJpegMetadata
8412 *
8413 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8414 * JPEG metadata is cached in HAL, and returned as part of the capture
8415 * result when metadata is returned from camera daemon.
8416 *
8417 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8418 * @request: capture request
8419 *
8420 *==========================================================================*/
8421void QCamera3HardwareInterface::extractJpegMetadata(
8422 CameraMetadata& jpegMetadata,
8423 const camera3_capture_request_t *request)
8424{
8425 CameraMetadata frame_settings;
8426 frame_settings = request->settings;
8427
8428 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8429 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8430 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8431 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8432
8433 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8434 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8435 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8436 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8437
8438 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8439 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8440 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8441 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8442
8443 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8444 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8445 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8446 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8447
8448 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8449 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8450 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8451 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8452
8453 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8454 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8455 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8456 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8457
8458 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8459 int32_t thumbnail_size[2];
8460 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8461 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8462 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8463 int32_t orientation =
8464 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008465 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008466 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8467 int32_t temp;
8468 temp = thumbnail_size[0];
8469 thumbnail_size[0] = thumbnail_size[1];
8470 thumbnail_size[1] = temp;
8471 }
8472 }
8473 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8474 thumbnail_size,
8475 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8476 }
8477
8478}
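// Worked example of the thumbnail swap above (hypothetical request): with
// ANDROID_JPEG_THUMBNAIL_SIZE = {320, 240}, ANDROID_JPEG_ORIENTATION = 90 and
// needJpegExifRotation() returning false, the cached JPEG metadata stores
// {240, 320} so the thumbnail aspect ratio matches the rotated main image.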
8479
8480/*===========================================================================
8481 * FUNCTION : convertToRegions
8482 *
8483 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8484 *
8485 * PARAMETERS :
8486 * @rect : cam_rect_t struct to convert
8487 * @region : int32_t destination array
8488 * @weight : if we are converting from cam_area_t, weight is valid
8489 * else weight = -1
8490 *
8491 *==========================================================================*/
8492void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8493 int32_t *region, int weight)
8494{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008495 region[FACE_LEFT] = rect.left;
8496 region[FACE_TOP] = rect.top;
8497 region[FACE_RIGHT] = rect.left + rect.width;
8498 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008499 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008500 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008501 }
8502}
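// Minimal usage sketch, mirroring translateCbUrgentMetadataToResultMetadata()
// (assumes 'roi' is a cam_area_t already mapped to active array coordinates):
//
//   int32_t region[REGIONS_TUPLE_COUNT];
//   convertToRegions(roi.rect, region, roi.weight);
//   camMetadata.update(ANDROID_CONTROL_AF_REGIONS, region, REGIONS_TUPLE_COUNT);
//
// The array is filled as left, top, right (left + width), bottom (top + height)
// and weight, i.e. the framework's region tuple layout.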
8503
8504/*===========================================================================
8505 * FUNCTION : convertFromRegions
8506 *
8507 * DESCRIPTION: helper method to convert a framework region tuple into cam_area_t
8508 *
8509 * PARAMETERS :
8510 * @roi : destination cam_area_t struct
8511 * @frame_settings : framework capture request settings
8512 * @tag : metadata tag whose data is laid out as
8513 * [x_min, y_min, x_max, y_max, weight]
8514 *
8515 *==========================================================================*/
8516void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008517 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008518{
Thierry Strudel3d639192016-09-09 11:52:26 -07008519 int32_t x_min = frame_settings.find(tag).data.i32[0];
8520 int32_t y_min = frame_settings.find(tag).data.i32[1];
8521 int32_t x_max = frame_settings.find(tag).data.i32[2];
8522 int32_t y_max = frame_settings.find(tag).data.i32[3];
8523 roi.weight = frame_settings.find(tag).data.i32[4];
8524 roi.rect.left = x_min;
8525 roi.rect.top = y_min;
8526 roi.rect.width = x_max - x_min;
8527 roi.rect.height = y_max - y_min;
8528}
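// Minimal usage sketch (illustrative; assumes the request contains a region
// tuple for the given tag, e.g. ANDROID_CONTROL_AE_REGIONS):
//
//   cam_area_t roi;
//   convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
//   // roi.rect now holds {x_min, y_min, x_max - x_min, y_max - y_min} and
//   // roi.weight holds the fifth element of the tuple.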
8529
8530/*===========================================================================
8531 * FUNCTION : resetIfNeededROI
8532 *
8533 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8534 * crop region
8535 *
8536 * PARAMETERS :
8537 * @roi : cam_area_t struct to resize
8538 * @scalerCropRegion : cam_crop_region_t region to compare against
8539 *
8540 *
8541 *==========================================================================*/
8542bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8543 const cam_crop_region_t* scalerCropRegion)
8544{
8545 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8546 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8547 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8548 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8549
8550 /* According to the spec, weight = 0 indicates that the roi should be disabled.
8551 * Without this check, the validation below (whether the roi lies inside the
8552 * scaler crop region) would fail, leaving the roi un-reset and causing the
8553 * algorithm to keep using a stale roi window.
8554 */
8555 if (roi->weight == 0) {
8556 return true;
8557 }
8558
8559 if ((roi_x_max < scalerCropRegion->left) ||
8560 // right edge of roi window is left of scaler crop's left edge
8561 (roi_y_max < scalerCropRegion->top) ||
8562 // bottom edge of roi window is above scaler crop's top edge
8563 (roi->rect.left > crop_x_max) ||
8564 // left edge of roi window is beyond (to the right of) scaler crop's right edge
8565 (roi->rect.top > crop_y_max)) {
8566 // top edge of roi window is below scaler crop's bottom edge
8567 return false;
8568 }
8569 if (roi->rect.left < scalerCropRegion->left) {
8570 roi->rect.left = scalerCropRegion->left;
8571 }
8572 if (roi->rect.top < scalerCropRegion->top) {
8573 roi->rect.top = scalerCropRegion->top;
8574 }
8575 if (roi_x_max > crop_x_max) {
8576 roi_x_max = crop_x_max;
8577 }
8578 if (roi_y_max > crop_y_max) {
8579 roi_y_max = crop_y_max;
8580 }
8581 roi->rect.width = roi_x_max - roi->rect.left;
8582 roi->rect.height = roi_y_max - roi->rect.top;
8583 return true;
8584}
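// Worked example (hypothetical values): with a scaler crop of
// {left=100, top=100, width=1000, height=800} and an roi of
// {left=50, top=50, width=200, height=200, weight=1}, the roi overlaps the
// crop, so it is clamped to {left=100, top=100, width=150, height=150} and
// true is returned. An roi entirely outside the crop returns false; an roi
// with weight == 0 returns true without being modified.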
8585
8586/*===========================================================================
8587 * FUNCTION : convertLandmarks
8588 *
8589 * DESCRIPTION: helper method to extract the landmarks from face detection info
8590 *
8591 * PARAMETERS :
8592 * @landmark_data : input landmark data to be converted
8593 * @landmarks : int32_t destination array
8594 *
8595 *
8596 *==========================================================================*/
8597void QCamera3HardwareInterface::convertLandmarks(
8598 cam_face_landmarks_info_t landmark_data,
8599 int32_t *landmarks)
8600{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008601 if (landmark_data.is_left_eye_valid) {
8602 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8603 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8604 } else {
8605 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8606 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8607 }
8608
8609 if (landmark_data.is_right_eye_valid) {
8610 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8611 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8612 } else {
8613 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8614 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8615 }
8616
8617 if (landmark_data.is_mouth_valid) {
8618 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8619 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8620 } else {
8621 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8622 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8623 }
8624}
8625
8626/*===========================================================================
8627 * FUNCTION : setInvalidLandmarks
8628 *
8629 * DESCRIPTION: helper method to set invalid landmarks
8630 *
8631 * PARAMETERS :
8632 * @landmarks : int32_t destination array
8633 *
8634 *
8635 *==========================================================================*/
8636void QCamera3HardwareInterface::setInvalidLandmarks(
8637 int32_t *landmarks)
8638{
8639 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8640 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8641 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8642 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8643 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8644 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008645}
8646
8647#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008648
8649/*===========================================================================
8650 * FUNCTION : getCapabilities
8651 *
8652 * DESCRIPTION: query camera capability from back-end
8653 *
8654 * PARAMETERS :
8655 * @ops : mm-interface ops structure
8656 * @cam_handle : camera handle for which we need capability
8657 *
8658 * RETURN : ptr type of capability structure
8659 * capability for success
8660 * NULL for failure
8661 *==========================================================================*/
8662cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8663 uint32_t cam_handle)
8664{
8665 int rc = NO_ERROR;
8666 QCamera3HeapMemory *capabilityHeap = NULL;
8667 cam_capability_t *cap_ptr = NULL;
8668
8669 if (ops == NULL) {
8670 LOGE("Invalid arguments");
8671 return NULL;
8672 }
8673
8674 capabilityHeap = new QCamera3HeapMemory(1);
8675 if (capabilityHeap == NULL) {
8676 LOGE("creation of capabilityHeap failed");
8677 return NULL;
8678 }
8679
8680 /* Allocate memory for capability buffer */
8681 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8682 if(rc != OK) {
8683 LOGE("No memory for cappability");
8684 goto allocate_failed;
8685 }
8686
8687 /* Map memory for capability buffer */
8688 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8689
8690 rc = ops->map_buf(cam_handle,
8691 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8692 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8693 if(rc < 0) {
8694 LOGE("failed to map capability buffer");
8695 rc = FAILED_TRANSACTION;
8696 goto map_failed;
8697 }
8698
8699 /* Query Capability */
8700 rc = ops->query_capability(cam_handle);
8701 if(rc < 0) {
8702 LOGE("failed to query capability");
8703 rc = FAILED_TRANSACTION;
8704 goto query_failed;
8705 }
8706
8707 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8708 if (cap_ptr == NULL) {
8709 LOGE("out of memory");
8710 rc = NO_MEMORY;
8711 goto query_failed;
8712 }
8713
8714 memset(cap_ptr, 0, sizeof(cam_capability_t));
8715 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8716
8717 int index;
8718 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8719 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8720 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8721 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8722 }
8723
8724query_failed:
8725 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8726map_failed:
8727 capabilityHeap->deallocate();
8728allocate_failed:
8729 delete capabilityHeap;
8730
8731 if (rc != NO_ERROR) {
8732 return NULL;
8733 } else {
8734 return cap_ptr;
8735 }
8736}
8737
Thierry Strudel3d639192016-09-09 11:52:26 -07008738/*===========================================================================
8739 * FUNCTION : initCapabilities
8740 *
8741 * DESCRIPTION: initialize camera capabilities in static data struct
8742 *
8743 * PARAMETERS :
8744 * @cameraId : camera Id
8745 *
8746 * RETURN : int32_t type of status
8747 * NO_ERROR -- success
8748 * none-zero failure code
8749 *==========================================================================*/
8750int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8751{
8752 int rc = 0;
8753 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008754 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008755
8756 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8757 if (rc) {
8758 LOGE("camera_open failed. rc = %d", rc);
8759 goto open_failed;
8760 }
8761 if (!cameraHandle) {
8762 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8763 goto open_failed;
8764 }
8765
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008766 handle = get_main_camera_handle(cameraHandle->camera_handle);
8767 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8768 if (gCamCapability[cameraId] == NULL) {
8769 rc = FAILED_TRANSACTION;
8770 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008771 }
8772
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008773 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008774 if (is_dual_camera_by_idx(cameraId)) {
8775 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8776 gCamCapability[cameraId]->aux_cam_cap =
8777 getCapabilities(cameraHandle->ops, handle);
8778 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8779 rc = FAILED_TRANSACTION;
8780 free(gCamCapability[cameraId]);
8781 goto failed_op;
8782 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008783
8784 // Copy the main camera capability to main_cam_cap struct
8785 gCamCapability[cameraId]->main_cam_cap =
8786 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8787 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8788 LOGE("out of memory");
8789 rc = NO_MEMORY;
8790 goto failed_op;
8791 }
8792 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8793 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008794 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008795failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008796 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8797 cameraHandle = NULL;
8798open_failed:
8799 return rc;
8800}
8801
8802/*==========================================================================
8803 * FUNCTION : get3AVersion
8804 *
8805 * DESCRIPTION: get the Q3A S/W version
8806 *
8807 * PARAMETERS :
8808 * @sw_version: Reference of Q3A structure which will hold version info upon
8809 * return
8810 *
8811 * RETURN : None
8812 *
8813 *==========================================================================*/
8814void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8815{
8816 if(gCamCapability[mCameraId])
8817 sw_version = gCamCapability[mCameraId]->q3a_version;
8818 else
8819 LOGE("Capability structure NULL!");
8820}
8821
8822
8823/*===========================================================================
8824 * FUNCTION : initParameters
8825 *
8826 * DESCRIPTION: initialize camera parameters
8827 *
8828 * PARAMETERS :
8829 *
8830 * RETURN : int32_t type of status
8831 * NO_ERROR -- success
8832 * none-zero failure code
8833 *==========================================================================*/
8834int QCamera3HardwareInterface::initParameters()
8835{
8836 int rc = 0;
8837
8838 //Allocate Set Param Buffer
8839 mParamHeap = new QCamera3HeapMemory(1);
8840 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8841 if(rc != OK) {
8842 rc = NO_MEMORY;
8843 LOGE("Failed to allocate SETPARM Heap memory");
8844 delete mParamHeap;
8845 mParamHeap = NULL;
8846 return rc;
8847 }
8848
8849 //Map memory for parameters buffer
8850 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8851 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8852 mParamHeap->getFd(0),
8853 sizeof(metadata_buffer_t),
8854 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8855 if(rc < 0) {
8856 LOGE("failed to map SETPARM buffer");
8857 rc = FAILED_TRANSACTION;
8858 mParamHeap->deallocate();
8859 delete mParamHeap;
8860 mParamHeap = NULL;
8861 return rc;
8862 }
8863
8864 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8865
8866 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8867 return rc;
8868}
8869
8870/*===========================================================================
8871 * FUNCTION : deinitParameters
8872 *
8873 * DESCRIPTION: de-initialize camera parameters
8874 *
8875 * PARAMETERS :
8876 *
8877 * RETURN : NONE
8878 *==========================================================================*/
8879void QCamera3HardwareInterface::deinitParameters()
8880{
8881 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8882 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8883
8884 mParamHeap->deallocate();
8885 delete mParamHeap;
8886 mParamHeap = NULL;
8887
8888 mParameters = NULL;
8889
8890 free(mPrevParameters);
8891 mPrevParameters = NULL;
8892}
8893
8894/*===========================================================================
8895 * FUNCTION : calcMaxJpegSize
8896 *
8897 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8898 *
8899 * PARAMETERS :
8900 * @camera_id : camera Id
8901 * RETURN : max_jpeg_size
8902 *==========================================================================*/
8903size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8904{
8905 size_t max_jpeg_size = 0;
8906 size_t temp_width, temp_height;
8907 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8908 MAX_SIZES_CNT);
8909 for (size_t i = 0; i < count; i++) {
8910 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8911 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8912 if (temp_width * temp_height > max_jpeg_size ) {
8913 max_jpeg_size = temp_width * temp_height;
8914 }
8915 }
8916 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8917 return max_jpeg_size;
8918}
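// Worked example (hypothetical sensor): for a largest picture size of
// 4000x3000, max_jpeg_size = 4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t),
// i.e. 18,000,000 bytes plus the blob header; the worst-case YUV 4:2:0
// footprint is used as the upper bound for the JPEG buffer.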
8919
8920/*===========================================================================
8921 * FUNCTION : getMaxRawSize
8922 *
8923 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8924 *
8925 * PARAMETERS :
8926 * @camera_id : camera Id
8927 * RETURN : Largest supported Raw Dimension
8928 *==========================================================================*/
8929cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8930{
8931 int max_width = 0;
8932 cam_dimension_t maxRawSize;
8933
8934 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8935 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8936 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8937 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8938 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8939 }
8940 }
8941 return maxRawSize;
8942}
8943
8944
8945/*===========================================================================
8946 * FUNCTION : calcMaxJpegDim
8947 *
8948 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8949 *
8950 * PARAMETERS :
8951 *
8952 * RETURN : max_jpeg_dim
8953 *==========================================================================*/
8954cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8955{
8956 cam_dimension_t max_jpeg_dim;
8957 cam_dimension_t curr_jpeg_dim;
8958 max_jpeg_dim.width = 0;
8959 max_jpeg_dim.height = 0;
8960 curr_jpeg_dim.width = 0;
8961 curr_jpeg_dim.height = 0;
8962 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8963 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8964 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8965 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8966 max_jpeg_dim.width * max_jpeg_dim.height ) {
8967 max_jpeg_dim.width = curr_jpeg_dim.width;
8968 max_jpeg_dim.height = curr_jpeg_dim.height;
8969 }
8970 }
8971 return max_jpeg_dim;
8972}
8973
8974/*===========================================================================
8975 * FUNCTION : addStreamConfig
8976 *
8977 * DESCRIPTION: adds the stream configuration to the array
8978 *
8979 * PARAMETERS :
8980 * @available_stream_configs : pointer to stream configuration array
8981 * @scalar_format : scalar format
8982 * @dim : configuration dimension
8983 * @config_type : input or output configuration type
8984 *
8985 * RETURN : NONE
8986 *==========================================================================*/
8987void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8988 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8989{
8990 available_stream_configs.add(scalar_format);
8991 available_stream_configs.add(dim.width);
8992 available_stream_configs.add(dim.height);
8993 available_stream_configs.add(config_type);
8994}
8995
8996/*===========================================================================
8997 * FUNCTION : supportBurstCapture
8998 *
8999 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9000 *
9001 * PARAMETERS :
9002 * @cameraId : camera Id
9003 *
9004 * RETURN : true if camera supports BURST_CAPTURE
9005 * false otherwise
9006 *==========================================================================*/
9007bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9008{
9009 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9010 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9011 const int32_t highResWidth = 3264;
9012 const int32_t highResHeight = 2448;
9013
9014 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9015 // Maximum resolution images cannot be captured at >= 10fps
9016 // -> not supporting BURST_CAPTURE
9017 return false;
9018 }
9019
9020 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9021 // Maximum resolution images can be captured at >= 20fps
9022 // --> supporting BURST_CAPTURE
9023 return true;
9024 }
9025
9026 // Find the smallest highRes resolution, or largest resolution if there is none
9027 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9028 MAX_SIZES_CNT);
9029 size_t highRes = 0;
9030 while ((highRes + 1 < totalCnt) &&
9031 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9032 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9033 highResWidth * highResHeight)) {
9034 highRes++;
9035 }
9036 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9037 return true;
9038 } else {
9039 return false;
9040 }
9041}
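// Decision sketch using the bounds above (hypothetical sensors): a full-
// resolution minimum frame duration of 40 ms (25 fps) reports BURST_CAPTURE
// right away (<= 50 ms); 120 ms (> 100 ms) rules it out; for anything in
// between, the smallest picture size still >= 3264x2448 must itself reach
// <= 50 ms for BURST_CAPTURE to be advertised.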
9042
9043/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009044 * FUNCTION : getPDStatIndex
9045 *
9046 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9047 *
9048 * PARAMETERS :
9049 * @caps : camera capabilities
9050 *
9051 * RETURN : int32_t type
9052 * non-negative - on success
9053 * -1 - on failure
9054 *==========================================================================*/
9055int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9056 if (nullptr == caps) {
9057 return -1;
9058 }
9059
9060 uint32_t metaRawCount = caps->meta_raw_channel_count;
9061 int32_t ret = -1;
9062 for (size_t i = 0; i < metaRawCount; i++) {
9063 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9064 ret = i;
9065 break;
9066 }
9067 }
9068
9069 return ret;
9070}
9071
9072/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009073 * FUNCTION : initStaticMetadata
9074 *
9075 * DESCRIPTION: initialize the static metadata
9076 *
9077 * PARAMETERS :
9078 * @cameraId : camera Id
9079 *
9080 * RETURN : int32_t type of status
9081 * 0 -- success
9082 * non-zero failure code
9083 *==========================================================================*/
9084int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9085{
9086 int rc = 0;
9087 CameraMetadata staticInfo;
9088 size_t count = 0;
9089 bool limitedDevice = false;
9090 char prop[PROPERTY_VALUE_MAX];
9091 bool supportBurst = false;
9092
9093 supportBurst = supportBurstCapture(cameraId);
9094
9095 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9096 * guaranteed or if min fps of max resolution is less than 20 fps, it is
9097 * advertised as a limited device. */
9098 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9099 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9100 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9101 !supportBurst;
9102
9103 uint8_t supportedHwLvl = limitedDevice ?
9104 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009105#ifndef USE_HAL_3_3
9106 // LEVEL_3 - This device will support level 3.
9107 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9108#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009109 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009110#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009111
9112 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9113 &supportedHwLvl, 1);
9114
9115 bool facingBack = false;
9116 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9117 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9118 facingBack = true;
9119 }
9120 /*HAL 3 only*/
9121 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9122 &gCamCapability[cameraId]->min_focus_distance, 1);
9123
9124 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9125 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9126
9127 /*should be using focal lengths but sensor doesn't provide that info now*/
9128 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9129 &gCamCapability[cameraId]->focal_length,
9130 1);
9131
9132 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9133 gCamCapability[cameraId]->apertures,
9134 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9135
9136 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9137 gCamCapability[cameraId]->filter_densities,
9138 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9139
9140
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009141 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9142 size_t mode_count =
9143 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9144 for (size_t i = 0; i < mode_count; i++) {
9145 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9146 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009147 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009148 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009149
9150 int32_t lens_shading_map_size[] = {
9151 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9152 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9153 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9154 lens_shading_map_size,
9155 sizeof(lens_shading_map_size)/sizeof(int32_t));
9156
9157 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9158 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9159
9160 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9161 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9162
9163 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9164 &gCamCapability[cameraId]->max_frame_duration, 1);
9165
9166 camera_metadata_rational baseGainFactor = {
9167 gCamCapability[cameraId]->base_gain_factor.numerator,
9168 gCamCapability[cameraId]->base_gain_factor.denominator};
9169 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9170 &baseGainFactor, 1);
9171
9172 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9173 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9174
9175 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9176 gCamCapability[cameraId]->pixel_array_size.height};
9177 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9178 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9179
9180 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9181 gCamCapability[cameraId]->active_array_size.top,
9182 gCamCapability[cameraId]->active_array_size.width,
9183 gCamCapability[cameraId]->active_array_size.height};
9184 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9185 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9186
9187 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9188 &gCamCapability[cameraId]->white_level, 1);
9189
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009190 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9191 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9192 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009193 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009194 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009195
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009196#ifndef USE_HAL_3_3
9197 bool hasBlackRegions = false;
9198 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9199 LOGW("black_region_count: %d is bounded to %d",
9200 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9201 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9202 }
9203 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9204 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9205 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9206 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9207 }
9208 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9209 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9210 hasBlackRegions = true;
9211 }
9212#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009213 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9214 &gCamCapability[cameraId]->flash_charge_duration, 1);
9215
9216 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9217 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9218
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009219 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9220 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9221 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009222 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9223 &timestampSource, 1);
9224
Thierry Strudel54dc9782017-02-15 12:12:10 -08009225 //update histogram vendor data
9226 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009227 &gCamCapability[cameraId]->histogram_size, 1);
9228
Thierry Strudel54dc9782017-02-15 12:12:10 -08009229 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009230 &gCamCapability[cameraId]->max_histogram_count, 1);
9231
Shuzhen Wang14415f52016-11-16 18:26:18 -08009232 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9233 //so that app can request fewer number of bins than the maximum supported.
9234 std::vector<int32_t> histBins;
9235 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9236 histBins.push_back(maxHistBins);
9237 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9238 (maxHistBins & 0x1) == 0) {
9239 histBins.push_back(maxHistBins >> 1);
9240 maxHistBins >>= 1;
9241 }
9242 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9243 histBins.data(), histBins.size());
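    // Worked example (illustrative, assuming max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 32): the loop above keeps halving while the result stays
    // even and at least the minimum, so histBins would contain {256, 128, 64, 32}; an odd or
    // too-small intermediate value stops the halving early.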
9244
Thierry Strudel3d639192016-09-09 11:52:26 -07009245 int32_t sharpness_map_size[] = {
9246 gCamCapability[cameraId]->sharpness_map_size.width,
9247 gCamCapability[cameraId]->sharpness_map_size.height};
9248
9249 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9250 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9251
9252 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9253 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9254
Emilian Peev0f3c3162017-03-15 12:57:46 +00009255 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9256 if (0 <= indexPD) {
9257 // Advertise PD stats data as part of the Depth capabilities
9258 int32_t depthWidth =
9259 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9260 int32_t depthHeight =
9261 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9262 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9263 assert(0 < depthSamplesCount);
9264 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9265 &depthSamplesCount, 1);
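        // Illustrative note: the computation above assumes roughly 2 bytes per PD pixel and
        // 16 bytes per depth sample, i.e. one sample for every 8 pixels. For a hypothetical
        // 1008x756 PD stats dimension this would advertise (1008 * 756 * 2) / 16 = 95256
        // as ANDROID_DEPTH_MAX_DEPTH_SAMPLES.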
9266
9267 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9268 depthHeight,
9269 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9270 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9271 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9272 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9273 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9274
9275 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9276 depthHeight, 33333333,
9277 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9278 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9279 depthMinDuration,
9280 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9281
9282 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9283 depthHeight, 0,
9284 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9285 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9286 depthStallDuration,
9287 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9288
9289 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9290 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9291 }
9292
Thierry Strudel3d639192016-09-09 11:52:26 -07009293 int32_t scalar_formats[] = {
9294 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9295 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9296 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9297 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9298 HAL_PIXEL_FORMAT_RAW10,
9299 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009300 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9301 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9302 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009303
9304 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9305 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9306 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9307 count, MAX_SIZES_CNT, available_processed_sizes);
9308 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9309 available_processed_sizes, count * 2);
9310
9311 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9312 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9313 makeTable(gCamCapability[cameraId]->raw_dim,
9314 count, MAX_SIZES_CNT, available_raw_sizes);
9315 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9316 available_raw_sizes, count * 2);
9317
9318 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9319 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9320 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9321 count, MAX_SIZES_CNT, available_fps_ranges);
9322 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9323 available_fps_ranges, count * 2);
9324
9325 camera_metadata_rational exposureCompensationStep = {
9326 gCamCapability[cameraId]->exp_compensation_step.numerator,
9327 gCamCapability[cameraId]->exp_compensation_step.denominator};
9328 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9329 &exposureCompensationStep, 1);
9330
9331 Vector<uint8_t> availableVstabModes;
9332 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9333 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009334 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009335 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009336 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009337 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009338 count = IS_TYPE_MAX;
9339 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9340 for (size_t i = 0; i < count; i++) {
9341 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9342 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9343 eisSupported = true;
9344 break;
9345 }
9346 }
9347 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009348 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9349 }
9350 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9351 availableVstabModes.array(), availableVstabModes.size());
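    // Note (added commentary): VIDEO_STABILIZATION_MODE_ON is advertised only when all three
    // conditions hold: the camera is back-facing, persist.camera.eis.enable is non-zero
    // (default "1"), and the backend reports EIS 2.0 or EIS 3.0 support; otherwise only OFF
    // is listed.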
9352
9353 /*HAL 1 and HAL 3 common*/
9354 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9355 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9356 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009357 // Cap the max zoom to the max preferred value
9358 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009359 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9360 &maxZoom, 1);
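    // Worked example (illustrative): with a zoom_ratio_tbl whose last entry is 600 and the
    // HAL1-style base step of 100, maxZoomStep / minZoomStep evaluates to 6 (integer division,
    // so e.g. 650/100 would also yield 6), which is then capped at MAX_PREFERRED_ZOOM_RATIO
    // before being reported as ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM.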
9361
9362 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9363 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9364
9365 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9366 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9367 max3aRegions[2] = 0; /* AF not supported */
9368 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9369 max3aRegions, 3);
9370
9371 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9372 memset(prop, 0, sizeof(prop));
9373 property_get("persist.camera.facedetect", prop, "1");
9374 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9375 LOGD("Support face detection mode: %d",
9376 supportedFaceDetectMode);
9377
9378 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009379    /* supported mode should be OFF if the max number of faces is 0 */
9380 if (maxFaces <= 0) {
9381 supportedFaceDetectMode = 0;
9382 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009383 Vector<uint8_t> availableFaceDetectModes;
9384 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9385 if (supportedFaceDetectMode == 1) {
9386 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9387 } else if (supportedFaceDetectMode == 2) {
9388 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9389 } else if (supportedFaceDetectMode == 3) {
9390 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9391 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9392 } else {
9393 maxFaces = 0;
9394 }
9395 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9396 availableFaceDetectModes.array(),
9397 availableFaceDetectModes.size());
9398 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9399 (int32_t *)&maxFaces, 1);
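    // Usage note (illustrative): persist.camera.facedetect selects the advertised modes, e.g.
    //   setprop persist.camera.facedetect 3  -> OFF, SIMPLE and FULL are advertised
    //   setprop persist.camera.facedetect 1  -> OFF and SIMPLE are advertised
    //   setprop persist.camera.facedetect 0  -> only OFF is advertised and maxFaces is forced to 0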
Thierry Strudel54dc9782017-02-15 12:12:10 -08009400 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9401 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9402 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009403
9404 int32_t exposureCompensationRange[] = {
9405 gCamCapability[cameraId]->exposure_compensation_min,
9406 gCamCapability[cameraId]->exposure_compensation_max};
9407 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9408 exposureCompensationRange,
9409 sizeof(exposureCompensationRange)/sizeof(int32_t));
9410
9411 uint8_t lensFacing = (facingBack) ?
9412 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9413 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9414
9415 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9416 available_thumbnail_sizes,
9417 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9418
9419    /*all sizes will be combined into this tag*/
9420 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9421 /*android.scaler.availableStreamConfigurations*/
9422 Vector<int32_t> available_stream_configs;
9423 cam_dimension_t active_array_dim;
9424 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9425 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009426
9427    /*Advertise the list of supported input dimensions based on the property below.
9428    By default all sizes up to 5MP will be advertised.
9429    Note that the setprop resolution format should be WxH.
9430    e.g: adb shell setprop persist.camera.input.minsize 1280x720
9431    To list all supported sizes, the setprop needs to be set to "0x0" */
9432 cam_dimension_t minInputSize = {2592,1944}; //5MP
9433 memset(prop, 0, sizeof(prop));
9434 property_get("persist.camera.input.minsize", prop, "2592x1944");
9435 if (strlen(prop) > 0) {
9436 char *saveptr = NULL;
9437 char *token = strtok_r(prop, "x", &saveptr);
9438 if (token != NULL) {
9439 minInputSize.width = atoi(token);
9440 }
9441 token = strtok_r(NULL, "x", &saveptr);
9442 if (token != NULL) {
9443 minInputSize.height = atoi(token);
9444 }
9445 }
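    // Example (illustrative): with persist.camera.input.minsize set to 1920x1080 the parsing
    // above yields minInputSize = {1920, 1080}; only picture sizes that reach that width or
    // height are later advertised as reprocess input streams (see the INPUT configurations
    // added below).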
9446
Thierry Strudel3d639192016-09-09 11:52:26 -07009447    /* Add input/output stream configurations for each scalar format */
9448 for (size_t j = 0; j < scalar_formats_count; j++) {
9449 switch (scalar_formats[j]) {
9450 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9451 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9452 case HAL_PIXEL_FORMAT_RAW10:
9453 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9454 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9455 addStreamConfig(available_stream_configs, scalar_formats[j],
9456 gCamCapability[cameraId]->raw_dim[i],
9457 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9458 }
9459 break;
9460 case HAL_PIXEL_FORMAT_BLOB:
9461 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9462 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9463 addStreamConfig(available_stream_configs, scalar_formats[j],
9464 gCamCapability[cameraId]->picture_sizes_tbl[i],
9465 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9466 }
9467 break;
9468 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9469 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9470 default:
9471 cam_dimension_t largest_picture_size;
9472 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9473 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9474 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9475 addStreamConfig(available_stream_configs, scalar_formats[j],
9476 gCamCapability[cameraId]->picture_sizes_tbl[i],
9477 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009478                /* For the two formats below we also support input streams for reprocessing; advertise those */
9479 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9480 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9481 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9482 >= minInputSize.width) || (gCamCapability[cameraId]->
9483 picture_sizes_tbl[i].height >= minInputSize.height)) {
9484 addStreamConfig(available_stream_configs, scalar_formats[j],
9485 gCamCapability[cameraId]->picture_sizes_tbl[i],
9486 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9487 }
9488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009489 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009490
Thierry Strudel3d639192016-09-09 11:52:26 -07009491 break;
9492 }
9493 }
9494
9495 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9496 available_stream_configs.array(), available_stream_configs.size());
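    // Sketch (assumption, not the actual helper implementation): addStreamConfig presumably
    // appends one 4-tuple per ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entry, roughly:
    //   configs.add(format);
    //   configs.add(dim.width);
    //   configs.add(dim.height);
    //   configs.add(io_type);   // OUTPUT or INPUT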
9497 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9498 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9499
9500 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9501 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9502
9503 /* android.scaler.availableMinFrameDurations */
9504 Vector<int64_t> available_min_durations;
9505 for (size_t j = 0; j < scalar_formats_count; j++) {
9506 switch (scalar_formats[j]) {
9507 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9508 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9509 case HAL_PIXEL_FORMAT_RAW10:
9510 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9511 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9512 available_min_durations.add(scalar_formats[j]);
9513 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9514 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9515 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9516 }
9517 break;
9518 default:
9519 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9520 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9521 available_min_durations.add(scalar_formats[j]);
9522 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9523 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9524 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9525 }
9526 break;
9527 }
9528 }
9529 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9530 available_min_durations.array(), available_min_durations.size());
9531
9532 Vector<int32_t> available_hfr_configs;
9533 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9534 int32_t fps = 0;
9535 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9536 case CAM_HFR_MODE_60FPS:
9537 fps = 60;
9538 break;
9539 case CAM_HFR_MODE_90FPS:
9540 fps = 90;
9541 break;
9542 case CAM_HFR_MODE_120FPS:
9543 fps = 120;
9544 break;
9545 case CAM_HFR_MODE_150FPS:
9546 fps = 150;
9547 break;
9548 case CAM_HFR_MODE_180FPS:
9549 fps = 180;
9550 break;
9551 case CAM_HFR_MODE_210FPS:
9552 fps = 210;
9553 break;
9554 case CAM_HFR_MODE_240FPS:
9555 fps = 240;
9556 break;
9557 case CAM_HFR_MODE_480FPS:
9558 fps = 480;
9559 break;
9560 case CAM_HFR_MODE_OFF:
9561 case CAM_HFR_MODE_MAX:
9562 default:
9563 break;
9564 }
9565
9566 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9567 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9568 /* For each HFR frame rate, need to advertise one variable fps range
9569 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9570 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9571 * set by the app. When video recording is started, [120, 120] is
9572 * set. This way sensor configuration does not change when recording
9573 * is started */
9574
9575 /* (width, height, fps_min, fps_max, batch_size_max) */
9576 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9577 j < MAX_SIZES_CNT; j++) {
9578 available_hfr_configs.add(
9579 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9580 available_hfr_configs.add(
9581 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9582 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9583 available_hfr_configs.add(fps);
9584 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9585
9586 /* (width, height, fps_min, fps_max, batch_size_max) */
9587 available_hfr_configs.add(
9588 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9589 available_hfr_configs.add(
9590 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9591 available_hfr_configs.add(fps);
9592 available_hfr_configs.add(fps);
9593 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9594 }
9595 }
9596 }
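    // Worked example (illustrative, assuming PREVIEW_FPS_FOR_HFR = 30): a 1920x1080 entry in
    // the 240 fps HFR table yields two high-speed configurations,
    //   {1920, 1080, 30, 240, 8} and {1920, 1080, 240, 240, 8},
    // where the final field is the batch size (240 / 30).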
9597 //Advertise HFR capability only if the property is set
9598 memset(prop, 0, sizeof(prop));
9599 property_get("persist.camera.hal3hfr.enable", prop, "1");
9600 uint8_t hfrEnable = (uint8_t)atoi(prop);
9601
9602 if(hfrEnable && available_hfr_configs.array()) {
9603 staticInfo.update(
9604 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9605 available_hfr_configs.array(), available_hfr_configs.size());
9606 }
9607
9608 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9609 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9610 &max_jpeg_size, 1);
9611
9612 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9613 size_t size = 0;
9614 count = CAM_EFFECT_MODE_MAX;
9615 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9616 for (size_t i = 0; i < count; i++) {
9617 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9618 gCamCapability[cameraId]->supported_effects[i]);
9619 if (NAME_NOT_FOUND != val) {
9620 avail_effects[size] = (uint8_t)val;
9621 size++;
9622 }
9623 }
9624 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9625 avail_effects,
9626 size);
9627
9628 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9629 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9630 size_t supported_scene_modes_cnt = 0;
9631 count = CAM_SCENE_MODE_MAX;
9632 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9633 for (size_t i = 0; i < count; i++) {
9634 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9635 CAM_SCENE_MODE_OFF) {
9636 int val = lookupFwkName(SCENE_MODES_MAP,
9637 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9638 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009639
Thierry Strudel3d639192016-09-09 11:52:26 -07009640 if (NAME_NOT_FOUND != val) {
9641 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9642 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9643 supported_scene_modes_cnt++;
9644 }
9645 }
9646 }
9647 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9648 avail_scene_modes,
9649 supported_scene_modes_cnt);
9650
9651 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9652 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9653 supported_scene_modes_cnt,
9654 CAM_SCENE_MODE_MAX,
9655 scene_mode_overrides,
9656 supported_indexes,
9657 cameraId);
9658
9659 if (supported_scene_modes_cnt == 0) {
9660 supported_scene_modes_cnt = 1;
9661 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9662 }
9663
9664 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9665 scene_mode_overrides, supported_scene_modes_cnt * 3);
9666
9667 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9668 ANDROID_CONTROL_MODE_AUTO,
9669 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9670 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9671 available_control_modes,
9672 3);
9673
9674 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9675 size = 0;
9676 count = CAM_ANTIBANDING_MODE_MAX;
9677 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9678 for (size_t i = 0; i < count; i++) {
9679 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9680 gCamCapability[cameraId]->supported_antibandings[i]);
9681 if (NAME_NOT_FOUND != val) {
9682 avail_antibanding_modes[size] = (uint8_t)val;
9683 size++;
9684 }
9685
9686 }
9687 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9688 avail_antibanding_modes,
9689 size);
9690
9691 uint8_t avail_abberation_modes[] = {
9692 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9693 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9694 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9695 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9696 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9697 if (0 == count) {
9698        // If no aberration correction modes are available for a device, advertise only the OFF mode
9699 size = 1;
9700 } else {
9701        // If count is not zero then at least one of the FAST or HIGH_QUALITY modes is supported.
9702        // So, advertise all 3 modes if at least one mode is supported, as per the
9703        // new M requirement.
9704 size = 3;
9705 }
9706 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9707 avail_abberation_modes,
9708 size);
9709
9710 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9711 size = 0;
9712 count = CAM_FOCUS_MODE_MAX;
9713 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9714 for (size_t i = 0; i < count; i++) {
9715 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9716 gCamCapability[cameraId]->supported_focus_modes[i]);
9717 if (NAME_NOT_FOUND != val) {
9718 avail_af_modes[size] = (uint8_t)val;
9719 size++;
9720 }
9721 }
9722 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9723 avail_af_modes,
9724 size);
9725
9726 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9727 size = 0;
9728 count = CAM_WB_MODE_MAX;
9729 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9730 for (size_t i = 0; i < count; i++) {
9731 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9732 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9733 gCamCapability[cameraId]->supported_white_balances[i]);
9734 if (NAME_NOT_FOUND != val) {
9735 avail_awb_modes[size] = (uint8_t)val;
9736 size++;
9737 }
9738 }
9739 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9740 avail_awb_modes,
9741 size);
9742
9743 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9744 count = CAM_FLASH_FIRING_LEVEL_MAX;
9745 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9746 count);
9747 for (size_t i = 0; i < count; i++) {
9748 available_flash_levels[i] =
9749 gCamCapability[cameraId]->supported_firing_levels[i];
9750 }
9751 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9752 available_flash_levels, count);
9753
9754 uint8_t flashAvailable;
9755 if (gCamCapability[cameraId]->flash_available)
9756 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9757 else
9758 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9759 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9760 &flashAvailable, 1);
9761
9762 Vector<uint8_t> avail_ae_modes;
9763 count = CAM_AE_MODE_MAX;
9764 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9765 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009766 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9767 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9768 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9769 }
9770 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009771 }
9772 if (flashAvailable) {
9773 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9774 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9775 }
9776 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9777 avail_ae_modes.array(),
9778 avail_ae_modes.size());
9779
9780 int32_t sensitivity_range[2];
9781 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9782 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9783 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9784 sensitivity_range,
9785 sizeof(sensitivity_range) / sizeof(int32_t));
9786
9787 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9788 &gCamCapability[cameraId]->max_analog_sensitivity,
9789 1);
9790
9791 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9792 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9793 &sensor_orientation,
9794 1);
9795
9796 int32_t max_output_streams[] = {
9797 MAX_STALLING_STREAMS,
9798 MAX_PROCESSED_STREAMS,
9799 MAX_RAW_STREAMS};
9800 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9801 max_output_streams,
9802 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9803
9804 uint8_t avail_leds = 0;
9805 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9806 &avail_leds, 0);
9807
9808 uint8_t focus_dist_calibrated;
9809 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9810 gCamCapability[cameraId]->focus_dist_calibrated);
9811 if (NAME_NOT_FOUND != val) {
9812 focus_dist_calibrated = (uint8_t)val;
9813 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9814 &focus_dist_calibrated, 1);
9815 }
9816
9817 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9818 size = 0;
9819 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9820 MAX_TEST_PATTERN_CNT);
9821 for (size_t i = 0; i < count; i++) {
9822 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9823 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9824 if (NAME_NOT_FOUND != testpatternMode) {
9825 avail_testpattern_modes[size] = testpatternMode;
9826 size++;
9827 }
9828 }
9829 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9830 avail_testpattern_modes,
9831 size);
9832
9833 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9834 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9835 &max_pipeline_depth,
9836 1);
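    // Worked example (illustrative values): with MAX_INFLIGHT_REQUESTS = 4,
    // EMPTY_PIPELINE_DELAY = 2 and FRAME_SKIP_DELAY = 0, the advertised
    // ANDROID_REQUEST_PIPELINE_MAX_DEPTH would be 4 + 2 + 0 = 6.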
9837
9838 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9839 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9840 &partial_result_count,
9841 1);
9842
9843 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9844 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9845
9846 Vector<uint8_t> available_capabilities;
9847 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9848 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9849 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9850 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9851 if (supportBurst) {
9852 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9853 }
9854 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9855 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9856 if (hfrEnable && available_hfr_configs.array()) {
9857 available_capabilities.add(
9858 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9859 }
9860
9861 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9862 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9863 }
9864 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9865 available_capabilities.array(),
9866 available_capabilities.size());
9867
9868    //aeLockAvailable is to be set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9869    //Assumption is that all Bayer cameras support MANUAL_SENSOR.
9870 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9871 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9872
9873 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9874 &aeLockAvailable, 1);
9875
9876    //awbLockAvailable is to be set to true if the capabilities include MANUAL_POST_PROCESSING or
9877    //BURST_CAPTURE. Assumption is that all Bayer cameras support MANUAL_POST_PROCESSING.
9878 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9879 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9880
9881 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9882 &awbLockAvailable, 1);
9883
9884 int32_t max_input_streams = 1;
9885 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9886 &max_input_streams,
9887 1);
9888
9889 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9890 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9891 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9892 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9893 HAL_PIXEL_FORMAT_YCbCr_420_888};
9894 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9895 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
9896
9897 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9898 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9899 &max_latency,
9900 1);
9901
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009902#ifndef USE_HAL_3_3
9903 int32_t isp_sensitivity_range[2];
9904 isp_sensitivity_range[0] =
9905 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9906 isp_sensitivity_range[1] =
9907 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9908 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9909 isp_sensitivity_range,
9910 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9911#endif
9912
Thierry Strudel3d639192016-09-09 11:52:26 -07009913 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9914 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9915 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9916 available_hot_pixel_modes,
9917 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9918
9919 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9920 ANDROID_SHADING_MODE_FAST,
9921 ANDROID_SHADING_MODE_HIGH_QUALITY};
9922 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9923 available_shading_modes,
9924 3);
9925
9926 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9927 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9928 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9929 available_lens_shading_map_modes,
9930 2);
9931
9932 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9933 ANDROID_EDGE_MODE_FAST,
9934 ANDROID_EDGE_MODE_HIGH_QUALITY,
9935 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9936 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9937 available_edge_modes,
9938 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9939
9940 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9941 ANDROID_NOISE_REDUCTION_MODE_FAST,
9942 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9943 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9944 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9945 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9946 available_noise_red_modes,
9947 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9948
9949 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9950 ANDROID_TONEMAP_MODE_FAST,
9951 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9952 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9953 available_tonemap_modes,
9954 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9955
9956 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9957 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9958 available_hot_pixel_map_modes,
9959 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9960
9961 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9962 gCamCapability[cameraId]->reference_illuminant1);
9963 if (NAME_NOT_FOUND != val) {
9964 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9965 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9966 }
9967
9968 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9969 gCamCapability[cameraId]->reference_illuminant2);
9970 if (NAME_NOT_FOUND != val) {
9971 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9972 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9973 }
9974
9975 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9976 (void *)gCamCapability[cameraId]->forward_matrix1,
9977 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9978
9979 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9980 (void *)gCamCapability[cameraId]->forward_matrix2,
9981 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9982
9983 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9984 (void *)gCamCapability[cameraId]->color_transform1,
9985 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9986
9987 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9988 (void *)gCamCapability[cameraId]->color_transform2,
9989 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9990
9991 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9992 (void *)gCamCapability[cameraId]->calibration_transform1,
9993 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9994
9995 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9996 (void *)gCamCapability[cameraId]->calibration_transform2,
9997 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9998
9999 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10000 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10001 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10002 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10003 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10004 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10005 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10006 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10007 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10008 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10009 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10010 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10011 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10012 ANDROID_JPEG_GPS_COORDINATES,
10013 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10014 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10015 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10016 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10017 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10018 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10019 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10020 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10021 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10022 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010023#ifndef USE_HAL_3_3
10024 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10025#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010026 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010027 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010028 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10029 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010030 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010031 /* DevCamDebug metadata request_keys_basic */
10032 DEVCAMDEBUG_META_ENABLE,
10033 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010034 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010035 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010036 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010037 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Samuel Ha68ba5172016-12-15 18:41:12 -080010038 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010039
10040 size_t request_keys_cnt =
10041 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10042 Vector<int32_t> available_request_keys;
10043 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10044 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10045 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10046 }
10047
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010048 if (gExposeEnableZslKey) {
Chien-Yu Chened0a4c92017-05-01 18:25:03 +000010049 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010050 }
10051
Thierry Strudel3d639192016-09-09 11:52:26 -070010052 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10053 available_request_keys.array(), available_request_keys.size());
10054
10055 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10056 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10057 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10058 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10059 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10060 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10061 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10062 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10063 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10064 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10065 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10066 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10067 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10068 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10069 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10070 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10071 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010072 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010073 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10074 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10075 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010076 ANDROID_STATISTICS_FACE_SCORES,
10077#ifndef USE_HAL_3_3
10078 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10079#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010080 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010081 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010082 // DevCamDebug metadata result_keys_basic
10083 DEVCAMDEBUG_META_ENABLE,
10084 // DevCamDebug metadata result_keys AF
10085 DEVCAMDEBUG_AF_LENS_POSITION,
10086 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10087 DEVCAMDEBUG_AF_TOF_DISTANCE,
10088 DEVCAMDEBUG_AF_LUMA,
10089 DEVCAMDEBUG_AF_HAF_STATE,
10090 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10091 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10092 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10093 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10094 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10095 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10096 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10097 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10098 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10099 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10100 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10101 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10102 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10103 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10104 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10105 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10106 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10107 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10108 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10109 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10110 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10111 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10112 // DevCamDebug metadata result_keys AEC
10113 DEVCAMDEBUG_AEC_TARGET_LUMA,
10114 DEVCAMDEBUG_AEC_COMP_LUMA,
10115 DEVCAMDEBUG_AEC_AVG_LUMA,
10116 DEVCAMDEBUG_AEC_CUR_LUMA,
10117 DEVCAMDEBUG_AEC_LINECOUNT,
10118 DEVCAMDEBUG_AEC_REAL_GAIN,
10119 DEVCAMDEBUG_AEC_EXP_INDEX,
10120 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010121 // DevCamDebug metadata result_keys zzHDR
10122 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10123 DEVCAMDEBUG_AEC_L_LINECOUNT,
10124 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10125 DEVCAMDEBUG_AEC_S_LINECOUNT,
10126 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10127 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10128 // DevCamDebug metadata result_keys ADRC
10129 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10130 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10131 DEVCAMDEBUG_AEC_GTM_RATIO,
10132 DEVCAMDEBUG_AEC_LTM_RATIO,
10133 DEVCAMDEBUG_AEC_LA_RATIO,
10134 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010135 // DevCamDebug metadata result_keys AWB
10136 DEVCAMDEBUG_AWB_R_GAIN,
10137 DEVCAMDEBUG_AWB_G_GAIN,
10138 DEVCAMDEBUG_AWB_B_GAIN,
10139 DEVCAMDEBUG_AWB_CCT,
10140 DEVCAMDEBUG_AWB_DECISION,
10141 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010142 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10143 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10144 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010145 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010146 };
10147
Thierry Strudel3d639192016-09-09 11:52:26 -070010148 size_t result_keys_cnt =
10149 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10150
10151 Vector<int32_t> available_result_keys;
10152 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10153 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10154 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10155 }
10156 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10157 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10158 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10159 }
10160 if (supportedFaceDetectMode == 1) {
10161 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10162 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10163 } else if ((supportedFaceDetectMode == 2) ||
10164 (supportedFaceDetectMode == 3)) {
10165 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10166 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10167 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010168#ifndef USE_HAL_3_3
10169 if (hasBlackRegions) {
10170 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10171 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10172 }
10173#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010174
10175 if (gExposeEnableZslKey) {
10176 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10177 }
10178
Thierry Strudel3d639192016-09-09 11:52:26 -070010179 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10180 available_result_keys.array(), available_result_keys.size());
10181
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010182 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010183 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10184 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10185 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10186 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10187 ANDROID_SCALER_CROPPING_TYPE,
10188 ANDROID_SYNC_MAX_LATENCY,
10189 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10190 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10191 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10192 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10193 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10194 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10195 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10196 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10197 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10198 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10199 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10200 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10201 ANDROID_LENS_FACING,
10202 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10203 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10204 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10205 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10206 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10207 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10208 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10209 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10210 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10211 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10212 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10213 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10214 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10215 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10216 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10217 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10218 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10219 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10220 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10221 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010222 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010223 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10224 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10225 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10226 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10227 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10228 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10229 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10230 ANDROID_CONTROL_AVAILABLE_MODES,
10231 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10232 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10233 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10234 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010235 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10236#ifndef USE_HAL_3_3
10237 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10238 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10239#endif
10240 };
10241
10242 Vector<int32_t> available_characteristics_keys;
10243 available_characteristics_keys.appendArray(characteristics_keys_basic,
10244 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10245#ifndef USE_HAL_3_3
10246 if (hasBlackRegions) {
10247 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10248 }
10249#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010250
10251 if (0 <= indexPD) {
10252 int32_t depthKeys[] = {
10253 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10254 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10255 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10256 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10257 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10258 };
10259 available_characteristics_keys.appendArray(depthKeys,
10260 sizeof(depthKeys) / sizeof(depthKeys[0]));
10261 }
10262
Thierry Strudel3d639192016-09-09 11:52:26 -070010263 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010264 available_characteristics_keys.array(),
10265 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010266
10267 /*available stall durations depend on the hw + sw and will be different for different devices */
10268 /*have to add for raw after implementation*/
10269 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10270 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10271
10272 Vector<int64_t> available_stall_durations;
10273 for (uint32_t j = 0; j < stall_formats_count; j++) {
10274 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10275 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10276 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10277 available_stall_durations.add(stall_formats[j]);
10278 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10279 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10280 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10281 }
10282 } else {
10283 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10284 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10285 available_stall_durations.add(stall_formats[j]);
10286 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10287 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10288 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10289 }
10290 }
10291 }
10292 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10293 available_stall_durations.array(),
10294 available_stall_durations.size());
10295
10296 //QCAMERA3_OPAQUE_RAW
10297 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10298 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10299 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10300 case LEGACY_RAW:
10301 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10302 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10303 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10304 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10305 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10306 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10307 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10308 break;
10309 case MIPI_RAW:
10310 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10311 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10312 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10313 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10314 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10315 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10316 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10317 break;
10318 default:
10319 LOGE("unknown opaque_raw_format %d",
10320 gCamCapability[cameraId]->opaque_raw_fmt);
10321 break;
10322 }
10323 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
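    // Example (illustrative): a sensor reporting opaque_raw_fmt = MIPI_RAW with
    // white_level = 1023 (MAX_VALUE_10BIT) selects CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG
    // above and advertises QCAMERA3_OPAQUE_RAW_FORMAT_MIPI to the framework.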
10324
10325 Vector<int32_t> strides;
10326 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10327 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10328 cam_stream_buf_plane_info_t buf_planes;
10329 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10330 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10331 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10332 &gCamCapability[cameraId]->padding_info, &buf_planes);
10333 strides.add(buf_planes.plane_info.mp[0].stride);
10334 }
10335 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10336 strides.size());
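    // Note (added commentary): QCAMERA3_OPAQUE_RAW_STRIDES is filled with {width, height,
    // stride} triplets, one per supported raw dimension, where the stride comes from the
    // plane layout computed by mm_stream_calc_offset_raw for the opaque raw format chosen
    // above (e.g. a hypothetical 4208-wide 10-bit MIPI raw size might report a padded stride
    // of 5264 bytes).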
10337
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010338 //TBD: remove the following line once backend advertises zzHDR in feature mask
10339 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010340 //Video HDR default
10341 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10342 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010343 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010344 int32_t vhdr_mode[] = {
10345 QCAMERA3_VIDEO_HDR_MODE_OFF,
10346 QCAMERA3_VIDEO_HDR_MODE_ON};
10347
10348 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10349 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10350 vhdr_mode, vhdr_mode_count);
10351 }
10352
Thierry Strudel3d639192016-09-09 11:52:26 -070010353 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10354 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10355 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10356
10357 uint8_t isMonoOnly =
10358 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10359 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10360 &isMonoOnly, 1);
10361
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010362#ifndef USE_HAL_3_3
10363 Vector<int32_t> opaque_size;
10364 for (size_t j = 0; j < scalar_formats_count; j++) {
10365 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10366 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10367 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10368 cam_stream_buf_plane_info_t buf_planes;
10369
10370 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10371 &gCamCapability[cameraId]->padding_info, &buf_planes);
10372
10373 if (rc == 0) {
10374 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10375 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10376 opaque_size.add(buf_planes.plane_info.frame_len);
10377                } else {
10378 LOGE("raw frame calculation failed!");
10379 }
10380 }
10381 }
10382 }
10383
10384 if ((opaque_size.size() > 0) &&
10385 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10386 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10387 else
10388        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10389#endif
10390
Thierry Strudel04e026f2016-10-10 11:27:36 -070010391 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10392 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10393 size = 0;
10394 count = CAM_IR_MODE_MAX;
10395 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10396 for (size_t i = 0; i < count; i++) {
10397 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10398 gCamCapability[cameraId]->supported_ir_modes[i]);
10399 if (NAME_NOT_FOUND != val) {
10400 avail_ir_modes[size] = (int32_t)val;
10401 size++;
10402 }
10403 }
10404 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10405 avail_ir_modes, size);
10406 }
10407
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010408 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10409 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10410 size = 0;
10411 count = CAM_AEC_CONVERGENCE_MAX;
10412 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10413 for (size_t i = 0; i < count; i++) {
10414 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10415 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10416 if (NAME_NOT_FOUND != val) {
10417 available_instant_aec_modes[size] = (int32_t)val;
10418 size++;
10419 }
10420 }
10421 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10422 available_instant_aec_modes, size);
10423 }
10424
Thierry Strudel54dc9782017-02-15 12:12:10 -080010425 int32_t sharpness_range[] = {
10426 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10427 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10428 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10429
10430 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10431 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10432 size = 0;
10433 count = CAM_BINNING_CORRECTION_MODE_MAX;
10434 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10435 for (size_t i = 0; i < count; i++) {
10436 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10437 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10438 gCamCapability[cameraId]->supported_binning_modes[i]);
10439 if (NAME_NOT_FOUND != val) {
10440 avail_binning_modes[size] = (int32_t)val;
10441 size++;
10442 }
10443 }
10444 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10445 avail_binning_modes, size);
10446 }
10447
10448 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10449 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10450 size = 0;
10451 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10452 for (size_t i = 0; i < count; i++) {
10453 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10454 gCamCapability[cameraId]->supported_aec_modes[i]);
10455 if (NAME_NOT_FOUND != val)
10456 available_aec_modes[size++] = val;
10457 }
10458 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10459 available_aec_modes, size);
10460 }
10461
10462 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10463 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10464 size = 0;
10465 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10466 for (size_t i = 0; i < count; i++) {
10467 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10468 gCamCapability[cameraId]->supported_iso_modes[i]);
10469 if (NAME_NOT_FOUND != val)
10470 available_iso_modes[size++] = val;
10471 }
10472 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10473 available_iso_modes, size);
10474 }
10475
10476 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010477 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010478 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10479 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10480 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10481
10482 int32_t available_saturation_range[4];
10483 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10484 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10485 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10486 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10487 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10488 available_saturation_range, 4);
10489
10490 uint8_t is_hdr_values[2];
10491 is_hdr_values[0] = 0;
10492 is_hdr_values[1] = 1;
10493 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10494 is_hdr_values, 2);
10495
10496 float is_hdr_confidence_range[2];
10497 is_hdr_confidence_range[0] = 0.0;
10498 is_hdr_confidence_range[1] = 1.0;
10499 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10500 is_hdr_confidence_range, 2);
10501
Emilian Peev0a972ef2017-03-16 10:25:53 +000010502 size_t eepromLength = strnlen(
10503 reinterpret_cast<const char *>(
10504 gCamCapability[cameraId]->eeprom_version_info),
10505 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10506 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010507 char easelInfo[] = ",E:N";
10508 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10509 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10510 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010511 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10512 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010513 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010514 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10515 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10516 }
10517
Thierry Strudel3d639192016-09-09 11:52:26 -070010518 gStaticMetadata[cameraId] = staticInfo.release();
10519 return rc;
10520}
10521
10522/*===========================================================================
10523 * FUNCTION : makeTable
10524 *
10525 * DESCRIPTION: make a table of sizes
10526 *
10527 * PARAMETERS :
10528 *
10529 *
10530 *==========================================================================*/
10531void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10532 size_t max_size, int32_t *sizeTable)
10533{
10534 size_t j = 0;
10535 if (size > max_size) {
10536 size = max_size;
10537 }
10538 for (size_t i = 0; i < size; i++) {
10539 sizeTable[j] = dimTable[i].width;
10540 sizeTable[j+1] = dimTable[i].height;
10541 j+=2;
10542 }
10543}
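/* Illustrative example (not part of the original source): makeTable flattens an
 * array of cam_dimension_t into interleaved width/height pairs. For instance,
 * dimTable = {{4032, 3024}, {1920, 1080}} with size = 2 yields
 * sizeTable = {4032, 3024, 1920, 1080}, which is the flattened layout the
 * framework expects for the available-size static metadata tags. */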
10544
10545/*===========================================================================
10546 * FUNCTION : makeFPSTable
10547 *
10548 * DESCRIPTION: make a table of fps ranges
10549 *
10550 * PARAMETERS :
10551 *
10552 *==========================================================================*/
10553void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10554 size_t max_size, int32_t *fpsRangesTable)
10555{
10556 size_t j = 0;
10557 if (size > max_size) {
10558 size = max_size;
10559 }
10560 for (size_t i = 0; i < size; i++) {
10561 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10562 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10563 j+=2;
10564 }
10565}
10566
10567/*===========================================================================
10568 * FUNCTION : makeOverridesList
10569 *
10570 * DESCRIPTION: make a list of scene mode overrides
10571 *
10572 * PARAMETERS :
10573 *
10574 *
10575 *==========================================================================*/
10576void QCamera3HardwareInterface::makeOverridesList(
10577 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10578 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10579{
10580    /* The daemon gives a list of overrides for all scene modes.
10581       However, we should send the framework only the overrides for the
10582       scene modes it supports. */
10583 size_t j = 0;
10584 if (size > max_size) {
10585 size = max_size;
10586 }
10587 size_t focus_count = CAM_FOCUS_MODE_MAX;
10588 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10589 focus_count);
10590 for (size_t i = 0; i < size; i++) {
10591 bool supt = false;
10592 size_t index = supported_indexes[i];
10593 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10594 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10595 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10596 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10597 overridesTable[index].awb_mode);
10598 if (NAME_NOT_FOUND != val) {
10599 overridesList[j+1] = (uint8_t)val;
10600 }
10601 uint8_t focus_override = overridesTable[index].af_mode;
10602 for (size_t k = 0; k < focus_count; k++) {
10603 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10604 supt = true;
10605 break;
10606 }
10607 }
10608 if (supt) {
10609 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10610 focus_override);
10611 if (NAME_NOT_FOUND != val) {
10612 overridesList[j+2] = (uint8_t)val;
10613 }
10614 } else {
10615 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10616 }
10617 j+=3;
10618 }
10619}
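/* Illustrative example (not part of the original source): each supported scene
 * mode contributes one (ae, awb, af) triple to overridesList. For a scene mode
 * whose override is auto white balance and continuous-picture focus on a
 * flash-capable camera, the triple would be
 * {ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, ANDROID_CONTROL_AWB_MODE_AUTO,
 *  ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE}, matching the three-entries-per-
 * scene-mode layout of ANDROID_CONTROL_SCENE_MODE_OVERRIDES. */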
10620
10621/*===========================================================================
10622 * FUNCTION : filterJpegSizes
10623 *
10624 * DESCRIPTION: Returns the supported jpeg sizes: the processed sizes that are
10625 *              within the maximum downscale factor of the active array size
10626 *
10627 * PARAMETERS :
10628 *
10629 * RETURN : length of jpegSizes array
10630 *==========================================================================*/
10631
10632size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10633 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10634 uint8_t downscale_factor)
10635{
10636 if (0 == downscale_factor) {
10637 downscale_factor = 1;
10638 }
10639
10640 int32_t min_width = active_array_size.width / downscale_factor;
10641 int32_t min_height = active_array_size.height / downscale_factor;
10642 size_t jpegSizesCnt = 0;
10643 if (processedSizesCnt > maxCount) {
10644 processedSizesCnt = maxCount;
10645 }
10646 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10647 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10648 jpegSizes[jpegSizesCnt] = processedSizes[i];
10649 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10650 jpegSizesCnt += 2;
10651 }
10652 }
10653 return jpegSizesCnt;
10654}
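/* Worked example (illustrative values only): with an active array of 4032x3024
 * and downscale_factor = 4, min_width/min_height become 1008/756, so a processed
 * size list of {4032x3024, 1920x1080, 640x480} is filtered down to
 * {4032x3024, 1920x1080}; 640x480 is dropped because it falls below the minimum. */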
10655
10656/*===========================================================================
10657 * FUNCTION : computeNoiseModelEntryS
10658 *
10659 * DESCRIPTION: function to map a given sensitivity to the S noise
10660 * model parameters in the DNG noise model.
10661 *
10662 * PARAMETERS : sens : the sensor sensitivity
10663 *
10664 * RETURN     : S (sensor amplification) noise
10665 *
10666 *==========================================================================*/
10667double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10668 double s = gCamCapability[mCameraId]->gradient_S * sens +
10669 gCamCapability[mCameraId]->offset_S;
10670 return ((s < 0.0) ? 0.0 : s);
10671}
10672
10673/*===========================================================================
10674 * FUNCTION : computeNoiseModelEntryO
10675 *
10676 * DESCRIPTION: function to map a given sensitivity to the O noise
10677 * model parameters in the DNG noise model.
10678 *
10679 * PARAMETERS : sens : the sensor sensitivity
10680 *
10681 * RETURN     : O (sensor readout) noise
10682 *
10683 *==========================================================================*/
10684double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10685 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10686 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10687 1.0 : (1.0 * sens / max_analog_sens);
10688 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10689 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10690 return ((o < 0.0) ? 0.0 : o);
10691}
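/* Background note (sketch based on the standard DNG/ANDROID_SENSOR_NOISE_PROFILE
 * model, not stated explicitly in this file): the S and O values computed above
 * parameterize the per-pixel noise as approximately sqrt(S * x + O) for signal
 * level x. S grows linearly with sensitivity (shot noise), while O grows with the
 * square of the digital gain (read noise); the gradient/offset constants come
 * from gCamCapability and are sensor specific. */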
10692
10693/*===========================================================================
10694 * FUNCTION : getSensorSensitivity
10695 *
10696 * DESCRIPTION: convert iso_mode to an integer value
10697 *
10698 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10699 *
10700 * RETURN     : sensitivity supported by sensor
10701 *
10702 *==========================================================================*/
10703int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10704{
10705 int32_t sensitivity;
10706
10707 switch (iso_mode) {
10708 case CAM_ISO_MODE_100:
10709 sensitivity = 100;
10710 break;
10711 case CAM_ISO_MODE_200:
10712 sensitivity = 200;
10713 break;
10714 case CAM_ISO_MODE_400:
10715 sensitivity = 400;
10716 break;
10717 case CAM_ISO_MODE_800:
10718 sensitivity = 800;
10719 break;
10720 case CAM_ISO_MODE_1600:
10721 sensitivity = 1600;
10722 break;
10723 default:
10724 sensitivity = -1;
10725 break;
10726 }
10727 return sensitivity;
10728}
10729
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010730int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010731 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010732 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10733 // to connect to Easel.
10734        bool doNotPowerOnEasel =
10735                property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10736
10737        if (doNotPowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010738 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10739 return OK;
10740 }
10741
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010742 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010743 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010744 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010745 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010746 return res;
10747 }
10748
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010749 EaselManagerClientOpened = true;
10750
10751 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010752 if (res != OK) {
10753 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10754 }
10755
Chien-Yu Chen3d24f472017-05-01 18:24:14 +000010756 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010757 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010758
10759 // Expose enableZsl key only when HDR+ mode is enabled.
10760 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010761 }
10762
10763 return OK;
10764}
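/* Usage note (assumption about typical debugging flow, not part of the original
 * source): the behavior above is driven entirely by system properties read here:
 *   camera.hdrplus.donotpoweroneasel  - leave Easel powered off (HDR+ test hook)
 *   persist.camera.hdrplus.enable     - 1 enables the full HDR+ path; otherwise
 *                                       Easel runs in bypass-only mode
 *   persist.camera.hdrplus.profiling  - enables HDR+ profiling
 * so toggling HDR+ on a debug build is typically just a matter of setting
 * persist.camera.hdrplus.enable and reopening the camera. */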
10765
Thierry Strudel3d639192016-09-09 11:52:26 -070010766/*===========================================================================
10767 * FUNCTION : getCamInfo
10768 *
10769 * DESCRIPTION: query camera capabilities
10770 *
10771 * PARAMETERS :
10772 * @cameraId : camera Id
10773 * @info : camera info struct to be filled in with camera capabilities
10774 *
10775 * RETURN : int type of status
10776 * NO_ERROR -- success
10777 * none-zero failure code
10778 *==========================================================================*/
10779int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10780 struct camera_info *info)
10781{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010782 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010783 int rc = 0;
10784
10785 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010786
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010787 {
10788 Mutex::Autolock l(gHdrPlusClientLock);
10789 rc = initHdrPlusClientLocked();
10790 if (rc != OK) {
10791 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10792 pthread_mutex_unlock(&gCamLock);
10793 return rc;
10794 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010795 }
10796
Thierry Strudel3d639192016-09-09 11:52:26 -070010797 if (NULL == gCamCapability[cameraId]) {
10798 rc = initCapabilities(cameraId);
10799 if (rc < 0) {
10800 pthread_mutex_unlock(&gCamLock);
10801 return rc;
10802 }
10803 }
10804
10805 if (NULL == gStaticMetadata[cameraId]) {
10806 rc = initStaticMetadata(cameraId);
10807 if (rc < 0) {
10808 pthread_mutex_unlock(&gCamLock);
10809 return rc;
10810 }
10811 }
10812
10813 switch(gCamCapability[cameraId]->position) {
10814 case CAM_POSITION_BACK:
10815 case CAM_POSITION_BACK_AUX:
10816 info->facing = CAMERA_FACING_BACK;
10817 break;
10818
10819 case CAM_POSITION_FRONT:
10820 case CAM_POSITION_FRONT_AUX:
10821 info->facing = CAMERA_FACING_FRONT;
10822 break;
10823
10824 default:
10825 LOGE("Unknown position type %d for camera id:%d",
10826 gCamCapability[cameraId]->position, cameraId);
10827 rc = -1;
10828 break;
10829 }
10830
10831
10832 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010833#ifndef USE_HAL_3_3
10834 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10835#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010836 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010837#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010838 info->static_camera_characteristics = gStaticMetadata[cameraId];
10839
10840 //For now assume both cameras can operate independently.
10841 info->conflicting_devices = NULL;
10842 info->conflicting_devices_length = 0;
10843
10844 //resource cost is 100 * MIN(1.0, m/M),
10845 //where m is throughput requirement with maximum stream configuration
10846 //and M is CPP maximum throughput.
10847 float max_fps = 0.0;
10848 for (uint32_t i = 0;
10849 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10850 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10851 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10852 }
10853 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10854 gCamCapability[cameraId]->active_array_size.width *
10855 gCamCapability[cameraId]->active_array_size.height * max_fps /
10856 gCamCapability[cameraId]->max_pixel_bandwidth;
10857 info->resource_cost = 100 * MIN(1.0, ratio);
10858 LOGI("camera %d resource cost is %d", cameraId,
10859 info->resource_cost);
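    /* Worked example (illustrative numbers only): with a 4032x3024 active array,
     * a 30 fps maximum and MAX_PROCESSED_STREAMS = 3, m = 3 * 4032 * 3024 * 30
     * ~= 1.1 Gpix/s; if max_pixel_bandwidth were 1.2 Gpix/s, the reported cost
     * would be 100 * MIN(1.0, 1.1 / 1.2) ~= 91. */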
10860
10861 pthread_mutex_unlock(&gCamLock);
10862 return rc;
10863}
10864
10865/*===========================================================================
10866 * FUNCTION : translateCapabilityToMetadata
10867 *
10868 * DESCRIPTION: translate the capability into camera_metadata_t
10869 *
10870 * PARAMETERS : type of the request
10871 *
10872 *
10873 * RETURN : success: camera_metadata_t*
10874 * failure: NULL
10875 *
10876 *==========================================================================*/
10877camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10878{
10879 if (mDefaultMetadata[type] != NULL) {
10880 return mDefaultMetadata[type];
10881 }
10882 //first time we are handling this request
10883 //fill up the metadata structure using the wrapper class
10884 CameraMetadata settings;
10885 //translate from cam_capability_t to camera_metadata_tag_t
10886 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10887 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10888 int32_t defaultRequestID = 0;
10889 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10890
10891 /* OIS disable */
10892 char ois_prop[PROPERTY_VALUE_MAX];
10893 memset(ois_prop, 0, sizeof(ois_prop));
10894 property_get("persist.camera.ois.disable", ois_prop, "0");
10895 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10896
10897 /* Force video to use OIS */
10898 char videoOisProp[PROPERTY_VALUE_MAX];
10899 memset(videoOisProp, 0, sizeof(videoOisProp));
10900 property_get("persist.camera.ois.video", videoOisProp, "1");
10901 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010902
10903 // Hybrid AE enable/disable
10904 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10905 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10906 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10907 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
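    // Summary of the property defaults read above (for reference): OIS stays
    // enabled unless persist.camera.ois.disable is set to 1, video templates
    // force OIS on unless persist.camera.ois.video is set to 0, and hybrid AE
    // is off unless persist.camera.hybrid_ae.enable is set to 1.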
10908
Thierry Strudel3d639192016-09-09 11:52:26 -070010909 uint8_t controlIntent = 0;
10910 uint8_t focusMode;
10911 uint8_t vsMode;
10912 uint8_t optStabMode;
10913 uint8_t cacMode;
10914 uint8_t edge_mode;
10915 uint8_t noise_red_mode;
10916 uint8_t tonemap_mode;
10917 bool highQualityModeEntryAvailable = FALSE;
10918 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010919 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010920 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10921 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010922 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010923 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010924 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010925
Thierry Strudel3d639192016-09-09 11:52:26 -070010926 switch (type) {
10927 case CAMERA3_TEMPLATE_PREVIEW:
10928 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10929 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10930 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10931 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10932 edge_mode = ANDROID_EDGE_MODE_FAST;
10933 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10934 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10935 break;
10936 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10937 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10938 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10939 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10940 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10941 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10942 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10943 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10944 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10945 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10946 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10947 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10948 highQualityModeEntryAvailable = TRUE;
10949 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10950 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10951 fastModeEntryAvailable = TRUE;
10952 }
10953 }
10954 if (highQualityModeEntryAvailable) {
10955 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10956 } else if (fastModeEntryAvailable) {
10957 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10958 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010959 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10960 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10961 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010962 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010963 break;
10964 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10965 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10966 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10967 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010968 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10969 edge_mode = ANDROID_EDGE_MODE_FAST;
10970 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10971 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10972 if (forceVideoOis)
10973 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10974 break;
10975 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10976 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10977 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10978 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010979 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10980 edge_mode = ANDROID_EDGE_MODE_FAST;
10981 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10982 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10983 if (forceVideoOis)
10984 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10985 break;
10986 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10987 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10988 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10989 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10990 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10991 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10992 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10993 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10994 break;
10995 case CAMERA3_TEMPLATE_MANUAL:
10996 edge_mode = ANDROID_EDGE_MODE_FAST;
10997 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10998 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10999 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11000 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11001 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11002 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11003 break;
11004 default:
11005 edge_mode = ANDROID_EDGE_MODE_FAST;
11006 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11007 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11008 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11009 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11010 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11011 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11012 break;
11013 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011014    // Set CAC to OFF if the underlying device doesn't support it
11015 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11016 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11017 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011018 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11019 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11020 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11021 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11022 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11023 }
11024 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011025 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011026 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011027
11028 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11029 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11030 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11031 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11032 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11033 || ois_disable)
11034 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11035 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011036 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011037
11038 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11039 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11040
11041 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11042 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11043
11044 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11045 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11046
11047 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11048 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11049
11050 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11051 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11052
11053 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11054 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11055
11056 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11057 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11058
11059 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11060 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11061
11062 /*flash*/
11063 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11064 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11065
11066 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11067 settings.update(ANDROID_FLASH_FIRING_POWER,
11068 &flashFiringLevel, 1);
11069
11070 /* lens */
11071 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11072 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11073
11074 if (gCamCapability[mCameraId]->filter_densities_count) {
11075 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11076 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11077 gCamCapability[mCameraId]->filter_densities_count);
11078 }
11079
11080 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11081 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11082
Thierry Strudel3d639192016-09-09 11:52:26 -070011083 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11084 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11085
11086 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11087 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11088
11089 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11090 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11091
11092 /* face detection (default to OFF) */
11093 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11094 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11095
Thierry Strudel54dc9782017-02-15 12:12:10 -080011096 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11097 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011098
11099 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11100 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11101
11102 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11103 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11104
Thierry Strudel3d639192016-09-09 11:52:26 -070011105
11106 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11107 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11108
11109    /* Exposure time (default to the minimum exposure time) */
11110 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11111 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11112
11113 /* frame duration */
11114 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11115 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11116
11117 /* sensitivity */
11118 static const int32_t default_sensitivity = 100;
11119 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011120#ifndef USE_HAL_3_3
11121 static const int32_t default_isp_sensitivity =
11122 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11123 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11124#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011125
11126 /*edge mode*/
11127 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11128
11129 /*noise reduction mode*/
11130 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11131
11132 /*color correction mode*/
11133 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11134 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11135
11136    /* tonemap mode */
11137 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11138
11139 int32_t scaler_crop_region[4];
11140 scaler_crop_region[0] = 0;
11141 scaler_crop_region[1] = 0;
11142 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11143 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11144 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11145
11146 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11147 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11148
11149 /*focus distance*/
11150 float focus_distance = 0.0;
11151 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11152
11153 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011154 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011155 float max_range = 0.0;
11156 float max_fixed_fps = 0.0;
11157 int32_t fps_range[2] = {0, 0};
11158 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11159 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011160 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11161 TEMPLATE_MAX_PREVIEW_FPS) {
11162 continue;
11163 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011164 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11165 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11166 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11167 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11168 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11169 if (range > max_range) {
11170 fps_range[0] =
11171 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11172 fps_range[1] =
11173 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11174 max_range = range;
11175 }
11176 } else {
11177 if (range < 0.01 && max_fixed_fps <
11178 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11179 fps_range[0] =
11180 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11181 fps_range[1] =
11182 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11183 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11184 }
11185 }
11186 }
11187 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
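    /* Worked example (illustrative fps table only): with {[15,30], [30,30],
     * [7.5,60]}, the [7.5,60] entry is skipped because its max exceeds
     * TEMPLATE_MAX_PREVIEW_FPS; preview/still/ZSL templates then pick the widest
     * remaining range [15,30], while video templates pick the highest fixed
     * range [30,30]. */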
11188
11189 /*precapture trigger*/
11190 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11191 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11192
11193 /*af trigger*/
11194 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11195 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11196
11197 /* ae & af regions */
11198 int32_t active_region[] = {
11199 gCamCapability[mCameraId]->active_array_size.left,
11200 gCamCapability[mCameraId]->active_array_size.top,
11201 gCamCapability[mCameraId]->active_array_size.left +
11202 gCamCapability[mCameraId]->active_array_size.width,
11203 gCamCapability[mCameraId]->active_array_size.top +
11204 gCamCapability[mCameraId]->active_array_size.height,
11205 0};
11206 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11207 sizeof(active_region) / sizeof(active_region[0]));
11208 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11209 sizeof(active_region) / sizeof(active_region[0]));
11210
11211 /* black level lock */
11212 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11213 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11214
Thierry Strudel3d639192016-09-09 11:52:26 -070011215 //special defaults for manual template
11216 if (type == CAMERA3_TEMPLATE_MANUAL) {
11217 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11218 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11219
11220 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11221 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11222
11223 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11224 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11225
11226 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11227 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11228
11229 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11230 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11231
11232 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11233 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11234 }
11235
11236
11237    /* TNR
11238     * This is where we decide for which templates TNR is enabled.
11239     * TNR is enabled if either the preview or the video stream requires it.
11240     * This is not to be confused with per-stream linking; that decision is
11241     * still made per session and is handled as part of stream configuration.
11242     */
11243 uint8_t tnr_enable = 0;
11244
11245 if (m_bTnrPreview || m_bTnrVideo) {
11246
11247 switch (type) {
11248 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11249 tnr_enable = 1;
11250 break;
11251
11252 default:
11253 tnr_enable = 0;
11254 break;
11255 }
11256
11257 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11258 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11259 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11260
11261 LOGD("TNR:%d with process plate %d for template:%d",
11262 tnr_enable, tnr_process_type, type);
11263 }
11264
11265 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011266 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011267 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11268
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011269 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011270 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11271
Shuzhen Wang920ea402017-05-03 08:49:39 -070011272 uint8_t related_camera_id = mCameraId;
11273 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011274
11275 /* CDS default */
11276 char prop[PROPERTY_VALUE_MAX];
11277 memset(prop, 0, sizeof(prop));
11278 property_get("persist.camera.CDS", prop, "Auto");
11279 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11280 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11281 if (CAM_CDS_MODE_MAX == cds_mode) {
11282 cds_mode = CAM_CDS_MODE_AUTO;
11283 }
11284
11285 /* Disabling CDS in templates which have TNR enabled*/
11286 if (tnr_enable)
11287 cds_mode = CAM_CDS_MODE_OFF;
11288
11289 int32_t mode = cds_mode;
11290 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011291
Thierry Strudel269c81a2016-10-12 12:13:59 -070011292 /* Manual Convergence AEC Speed is disabled by default*/
11293 float default_aec_speed = 0;
11294 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11295
11296 /* Manual Convergence AWB Speed is disabled by default*/
11297 float default_awb_speed = 0;
11298 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11299
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011300 // Set instant AEC to normal convergence by default
11301 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11302 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11303
Shuzhen Wang19463d72016-03-08 11:09:52 -080011304 /* hybrid ae */
11305 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11306
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011307 if (gExposeEnableZslKey) {
11308 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11309 }
11310
Thierry Strudel3d639192016-09-09 11:52:26 -070011311 mDefaultMetadata[type] = settings.release();
11312
11313 return mDefaultMetadata[type];
11314}
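/* Usage note (assumption about the surrounding HAL3 flow, not stated in this
 * function): the per-template metadata built above is what the framework receives
 * from the HAL's construct_default_request_settings() entry point, so any key
 * added here becomes part of the default capture request for that
 * CAMERA3_TEMPLATE_* type. */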
11315
11316/*===========================================================================
11317 * FUNCTION : setFrameParameters
11318 *
11319 * DESCRIPTION: set parameters per frame as requested in the metadata from
11320 * framework
11321 *
11322 * PARAMETERS :
11323 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011324 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011325 * @blob_request: Whether this request is a blob request or not
11326 *
11327 * RETURN : success: NO_ERROR
11328 * failure:
11329 *==========================================================================*/
11330int QCamera3HardwareInterface::setFrameParameters(
11331 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011332 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011333 int blob_request,
11334 uint32_t snapshotStreamId)
11335{
11336 /*translate from camera_metadata_t type to parm_type_t*/
11337 int rc = 0;
11338 int32_t hal_version = CAM_HAL_V3;
11339
11340 clear_metadata_buffer(mParameters);
11341 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11342 LOGE("Failed to set hal version in the parameters");
11343 return BAD_VALUE;
11344 }
11345
11346 /*we need to update the frame number in the parameters*/
11347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11348 request->frame_number)) {
11349 LOGE("Failed to set the frame number in the parameters");
11350 return BAD_VALUE;
11351 }
11352
11353 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011355 LOGE("Failed to set stream type mask in the parameters");
11356 return BAD_VALUE;
11357 }
11358
11359 if (mUpdateDebugLevel) {
11360 uint32_t dummyDebugLevel = 0;
11361        /* The value of dummyDebugLevel is irrelevant. On
11362         * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read. */
11363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11364 dummyDebugLevel)) {
11365 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11366 return BAD_VALUE;
11367 }
11368 mUpdateDebugLevel = false;
11369 }
11370
11371 if(request->settings != NULL){
11372 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11373 if (blob_request)
11374 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11375 }
11376
11377 return rc;
11378}
11379
11380/*===========================================================================
11381 * FUNCTION : setReprocParameters
11382 *
11383 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11384 * return it.
11385 *
11386 * PARAMETERS :
11387 * @request : request that needs to be serviced
11388 *
11389 * RETURN : success: NO_ERROR
11390 * failure:
11391 *==========================================================================*/
11392int32_t QCamera3HardwareInterface::setReprocParameters(
11393 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11394 uint32_t snapshotStreamId)
11395{
11396 /*translate from camera_metadata_t type to parm_type_t*/
11397 int rc = 0;
11398
11399 if (NULL == request->settings){
11400 LOGE("Reprocess settings cannot be NULL");
11401 return BAD_VALUE;
11402 }
11403
11404 if (NULL == reprocParam) {
11405 LOGE("Invalid reprocessing metadata buffer");
11406 return BAD_VALUE;
11407 }
11408 clear_metadata_buffer(reprocParam);
11409
11410 /*we need to update the frame number in the parameters*/
11411 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11412 request->frame_number)) {
11413 LOGE("Failed to set the frame number in the parameters");
11414 return BAD_VALUE;
11415 }
11416
11417 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11418 if (rc < 0) {
11419 LOGE("Failed to translate reproc request");
11420 return rc;
11421 }
11422
11423 CameraMetadata frame_settings;
11424 frame_settings = request->settings;
11425 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11426 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11427 int32_t *crop_count =
11428 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11429 int32_t *crop_data =
11430 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11431 int32_t *roi_map =
11432 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11433 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11434 cam_crop_data_t crop_meta;
11435 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11436 crop_meta.num_of_streams = 1;
11437 crop_meta.crop_info[0].crop.left = crop_data[0];
11438 crop_meta.crop_info[0].crop.top = crop_data[1];
11439 crop_meta.crop_info[0].crop.width = crop_data[2];
11440 crop_meta.crop_info[0].crop.height = crop_data[3];
11441
11442 crop_meta.crop_info[0].roi_map.left =
11443 roi_map[0];
11444 crop_meta.crop_info[0].roi_map.top =
11445 roi_map[1];
11446 crop_meta.crop_info[0].roi_map.width =
11447 roi_map[2];
11448 crop_meta.crop_info[0].roi_map.height =
11449 roi_map[3];
11450
11451 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11452 rc = BAD_VALUE;
11453 }
11454 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11455 request->input_buffer->stream,
11456 crop_meta.crop_info[0].crop.left,
11457 crop_meta.crop_info[0].crop.top,
11458 crop_meta.crop_info[0].crop.width,
11459 crop_meta.crop_info[0].crop.height);
11460 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11461 request->input_buffer->stream,
11462 crop_meta.crop_info[0].roi_map.left,
11463 crop_meta.crop_info[0].roi_map.top,
11464 crop_meta.crop_info[0].roi_map.width,
11465 crop_meta.crop_info[0].roi_map.height);
11466 } else {
11467 LOGE("Invalid reprocess crop count %d!", *crop_count);
11468 }
11469 } else {
11470 LOGE("No crop data from matching output stream");
11471 }
11472
11473 /* These settings are not needed for regular requests so handle them specially for
11474 reprocess requests; information needed for EXIF tags */
11475 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11476 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11477 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11478 if (NAME_NOT_FOUND != val) {
11479 uint32_t flashMode = (uint32_t)val;
11480 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11481 rc = BAD_VALUE;
11482 }
11483 } else {
11484 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11485 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11486 }
11487 } else {
11488 LOGH("No flash mode in reprocess settings");
11489 }
11490
11491 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11492 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11493 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11494 rc = BAD_VALUE;
11495 }
11496 } else {
11497 LOGH("No flash state in reprocess settings");
11498 }
11499
11500 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11501 uint8_t *reprocessFlags =
11502 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11503 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11504 *reprocessFlags)) {
11505 rc = BAD_VALUE;
11506 }
11507 }
11508
Thierry Strudel54dc9782017-02-15 12:12:10 -080011509 // Add exif debug data to internal metadata
11510 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11511 mm_jpeg_debug_exif_params_t *debug_params =
11512 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11513 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11514 // AE
11515 if (debug_params->ae_debug_params_valid == TRUE) {
11516 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11517 debug_params->ae_debug_params);
11518 }
11519 // AWB
11520 if (debug_params->awb_debug_params_valid == TRUE) {
11521 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11522 debug_params->awb_debug_params);
11523 }
11524 // AF
11525 if (debug_params->af_debug_params_valid == TRUE) {
11526 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11527 debug_params->af_debug_params);
11528 }
11529 // ASD
11530 if (debug_params->asd_debug_params_valid == TRUE) {
11531 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11532 debug_params->asd_debug_params);
11533 }
11534 // Stats
11535 if (debug_params->stats_debug_params_valid == TRUE) {
11536 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11537 debug_params->stats_debug_params);
11538 }
11539 // BE Stats
11540 if (debug_params->bestats_debug_params_valid == TRUE) {
11541 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11542 debug_params->bestats_debug_params);
11543 }
11544 // BHIST
11545 if (debug_params->bhist_debug_params_valid == TRUE) {
11546 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11547 debug_params->bhist_debug_params);
11548 }
11549 // 3A Tuning
11550 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11551 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11552 debug_params->q3a_tuning_debug_params);
11553 }
11554 }
11555
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011556 // Add metadata which reprocess needs
11557 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11558 cam_reprocess_info_t *repro_info =
11559 (cam_reprocess_info_t *)frame_settings.find
11560 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011561 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011562 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011563 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011564 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011565 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011566 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011567 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011568 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011569 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011570 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011571 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011572 repro_info->pipeline_flip);
11573 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11574 repro_info->af_roi);
11575 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11576 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011577        /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11578           CAM_INTF_PARM_ROTATION metadata has already been added in
11579           translateToHalMetadata, and the HAL needs to keep this new rotation
11580           metadata. Otherwise, the old rotation info saved in the vendor tag
11581           would be used. */
11582 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11583 CAM_INTF_PARM_ROTATION, reprocParam) {
11584 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11585 } else {
11586 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011587 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011588 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011589 }
11590
11591    /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11592       to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11593       roi.width and roi.height would be the final JPEG size.
11594       For now, HAL only checks this for reprocess requests. */
11595 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11596 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11597 uint8_t *enable =
11598 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11599 if (*enable == TRUE) {
11600 int32_t *crop_data =
11601 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11602 cam_stream_crop_info_t crop_meta;
11603 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11604 crop_meta.stream_id = 0;
11605 crop_meta.crop.left = crop_data[0];
11606 crop_meta.crop.top = crop_data[1];
11607 crop_meta.crop.width = crop_data[2];
11608 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011609 // The JPEG crop roi should match cpp output size
11610 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11611 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11612 crop_meta.roi_map.left = 0;
11613 crop_meta.roi_map.top = 0;
11614 crop_meta.roi_map.width = cpp_crop->crop.width;
11615 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011616 }
11617 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11618 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011619 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011620 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011621 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11622 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011623 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011624 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11625
11626 // Add JPEG scale information
11627 cam_dimension_t scale_dim;
11628 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11629 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11630 int32_t *roi =
11631 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11632 scale_dim.width = roi[2];
11633 scale_dim.height = roi[3];
11634 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11635 scale_dim);
11636 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11637 scale_dim.width, scale_dim.height, mCameraId);
11638 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011639 }
11640 }
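    /* Illustrative example (hypothetical values): an app that sets
     * QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1,
     * QCAMERA3_JPEG_ENCODE_CROP_RECT = {0, 0, 4000, 3000} and
     * QCAMERA3_JPEG_ENCODE_CROP_ROI  = {0, 0, 1920, 1080}
     * asks the HW JPEG encoder to crop the 4000x3000 region and scale it so the
     * final JPEG comes out at 1920x1080. */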
11641
11642 return rc;
11643}
11644
11645/*===========================================================================
11646 * FUNCTION : saveRequestSettings
11647 *
11648 * DESCRIPTION: Add any settings that might have changed to the request settings
11649 * and save the settings to be applied on the frame
11650 *
11651 * PARAMETERS :
11652 * @jpegMetadata : the extracted and/or modified jpeg metadata
11653 * @request : request with initial settings
11654 *
11655 * RETURN :
11656 * camera_metadata_t* : pointer to the saved request settings
11657 *==========================================================================*/
11658camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11659 const CameraMetadata &jpegMetadata,
11660 camera3_capture_request_t *request)
11661{
11662 camera_metadata_t *resultMetadata;
11663 CameraMetadata camMetadata;
11664 camMetadata = request->settings;
11665
11666 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11667 int32_t thumbnail_size[2];
11668 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11669 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11670 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11671 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11672 }
11673
11674 if (request->input_buffer != NULL) {
11675 uint8_t reprocessFlags = 1;
11676 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11677 (uint8_t*)&reprocessFlags,
11678 sizeof(reprocessFlags));
11679 }
11680
11681 resultMetadata = camMetadata.release();
11682 return resultMetadata;
11683}
11684
11685/*===========================================================================
11686 * FUNCTION : setHalFpsRange
11687 *
11688 * DESCRIPTION: set FPS range parameter
11689 *
11690 *
11691 * PARAMETERS :
11692 * @settings : Metadata from framework
11693 * @hal_metadata: Metadata buffer
11694 *
11695 *
11696 * RETURN : success: NO_ERROR
11697 * failure:
11698 *==========================================================================*/
11699int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11700 metadata_buffer_t *hal_metadata)
11701{
11702 int32_t rc = NO_ERROR;
11703 cam_fps_range_t fps_range;
11704 fps_range.min_fps = (float)
11705 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11706 fps_range.max_fps = (float)
11707 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11708 fps_range.video_min_fps = fps_range.min_fps;
11709 fps_range.video_max_fps = fps_range.max_fps;
11710
11711 LOGD("aeTargetFpsRange fps: [%f %f]",
11712 fps_range.min_fps, fps_range.max_fps);
11713 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11714 * follows:
11715 * ---------------------------------------------------------------|
11716 * Video stream is absent in configure_streams |
11717 * (Camcorder preview before the first video record |
11718 * ---------------------------------------------------------------|
11719 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11720 * | | | vid_min/max_fps|
11721 * ---------------------------------------------------------------|
11722 * NO | [ 30, 240] | 240 | [240, 240] |
11723 * |-------------|-------------|----------------|
11724 * | [240, 240] | 240 | [240, 240] |
11725 * ---------------------------------------------------------------|
11726 * Video stream is present in configure_streams |
11727 * ---------------------------------------------------------------|
11728 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11729 * | | | vid_min/max_fps|
11730 * ---------------------------------------------------------------|
11731 * NO | [ 30, 240] | 240 | [240, 240] |
11732 * (camcorder prev |-------------|-------------|----------------|
11733 * after video rec | [240, 240] | 240 | [240, 240] |
11734 * is stopped) | | | |
11735 * ---------------------------------------------------------------|
11736 * YES | [ 30, 240] | 240 | [240, 240] |
11737 * |-------------|-------------|----------------|
11738 * | [240, 240] | 240 | [240, 240] |
11739 * ---------------------------------------------------------------|
11740 * When Video stream is absent in configure_streams,
11741 * preview fps = sensor_fps / batchsize
11742 * Eg: for 240fps at batchSize 4, preview = 60fps
11743 * for 120fps at batchSize 4, preview = 30fps
11744 *
11745 * When video stream is present in configure_streams, preview fps is as per
11746 * the ratio of preview buffers to video buffers requested in process
11747 * capture request
11748 */
11749 mBatchSize = 0;
11750 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
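        // In constrained high-speed mode the sensor runs at a single high
        // rate, so pin both the still and video minimum fps to the requested
        // maximum (see the sensorFpsRange column in the table above).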
11751 fps_range.min_fps = fps_range.video_max_fps;
11752 fps_range.video_min_fps = fps_range.video_max_fps;
11753 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11754 fps_range.max_fps);
11755 if (NAME_NOT_FOUND != val) {
11756 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11758 return BAD_VALUE;
11759 }
11760
11761 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11762 /* If batchmode is currently in progress and the fps changes,
11763 * set the flag to restart the sensor */
11764 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11765 (mHFRVideoFps != fps_range.max_fps)) {
11766 mNeedSensorRestart = true;
11767 }
11768 mHFRVideoFps = fps_range.max_fps;
11769 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11770 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11771 mBatchSize = MAX_HFR_BATCH_SIZE;
11772 }
11773 }
11774 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11775
11776 }
11777 } else {
11778 /* HFR mode is session param in backend/ISP. This should be reset when
11779 * in non-HFR mode */
11780 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11781 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11782 return BAD_VALUE;
11783 }
11784 }
11785 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11786 return BAD_VALUE;
11787 }
11788 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11789 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11790 return rc;
11791}
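
/* Illustrative sketch (not part of the HAL build): how the HFR batch size
 * computed above follows from the requested maximum fps. The constants stand
 * in for PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE; the values 30 and 8 are
 * assumptions for this example only. */
#if 0
static uint32_t exampleHfrBatchSize(float maxFps)
{
    const float kPreviewFpsForHfr = 30.0f;  // assumed PREVIEW_FPS_FOR_HFR
    const uint32_t kMaxHfrBatchSize = 8;    // assumed MAX_HFR_BATCH_SIZE
    uint32_t batchSize = (uint32_t)(maxFps / kPreviewFpsForHfr);
    if (batchSize > kMaxHfrBatchSize) {
        batchSize = kMaxHfrBatchSize;
    }
    return batchSize;  // e.g. 240 fps -> 8, 120 fps -> 4
}
#endif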
11792
11793/*===========================================================================
11794 * FUNCTION : translateToHalMetadata
11795 *
 11796 * DESCRIPTION: read the framework camera_metadata_t and translate it into HAL parm_type_t entries
11797 *
11798 *
11799 * PARAMETERS :
11800 * @request : request sent from framework
11801 *
11802 *
11803 * RETURN : success: NO_ERROR
 11804 * failure: BAD_VALUE
11805 *==========================================================================*/
11806int QCamera3HardwareInterface::translateToHalMetadata
11807 (const camera3_capture_request_t *request,
11808 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011809 uint32_t snapshotStreamId) {
11810 if (request == nullptr || hal_metadata == nullptr) {
11811 return BAD_VALUE;
11812 }
11813
11814 int64_t minFrameDuration = getMinFrameDuration(request);
11815
11816 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11817 minFrameDuration);
11818}
11819
11820int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11821 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11822 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11823
Thierry Strudel3d639192016-09-09 11:52:26 -070011824 int rc = 0;
11825 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011826 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011827
11828 /* Do not change the order of the following list unless you know what you are
11829 * doing.
11830 * The order is laid out in such a way that parameters in the front of the table
11831 * may be used to override the parameters later in the table. Examples are:
11832 * 1. META_MODE should precede AEC/AWB/AF MODE
 11833 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11834 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
 11835 * 4. Any mode should precede its corresponding settings
11836 */
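    /* Each block below follows the same pattern: check that the framework tag
     * exists, map the framework value to the HAL value (via lookupHalName()
     * where the enums differ), then write it into the batch with
     * ADD_SET_PARAM_ENTRY_TO_BATCH(), setting rc = BAD_VALUE on failure. */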
11837 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11838 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11839 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11840 rc = BAD_VALUE;
11841 }
11842 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11843 if (rc != NO_ERROR) {
11844 LOGE("extractSceneMode failed");
11845 }
11846 }
11847
11848 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11849 uint8_t fwk_aeMode =
11850 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11851 uint8_t aeMode;
11852 int32_t redeye;
11853
11854 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11855 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011856 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11857 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011858 } else {
11859 aeMode = CAM_AE_MODE_ON;
11860 }
11861 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11862 redeye = 1;
11863 } else {
11864 redeye = 0;
11865 }
11866
11867 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11868 fwk_aeMode);
11869 if (NAME_NOT_FOUND != val) {
11870 int32_t flashMode = (int32_t)val;
11871 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11872 }
11873
11874 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11876 rc = BAD_VALUE;
11877 }
11878 }
11879
11880 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11881 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11882 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11883 fwk_whiteLevel);
11884 if (NAME_NOT_FOUND != val) {
11885 uint8_t whiteLevel = (uint8_t)val;
11886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11887 rc = BAD_VALUE;
11888 }
11889 }
11890 }
11891
11892 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11893 uint8_t fwk_cacMode =
11894 frame_settings.find(
11895 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11896 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11897 fwk_cacMode);
11898 if (NAME_NOT_FOUND != val) {
11899 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11900 bool entryAvailable = FALSE;
 11901            // Check whether the framework-requested CAC mode is supported by the device
11902 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11903 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11904 entryAvailable = TRUE;
11905 break;
11906 }
11907 }
11908 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
 11909            // If the mode is not supported, fall back to a device-supported mode instead of the framework mode, i.e.:
 11910            // Only HW ISP CAC + no SW CAC : advertise all 3 modes, with HIGH doing the same as FAST in the ISP
 11911            // No HW ISP CAC + only SW CAC : advertise all 3 modes, with FAST doing the same as OFF
11912 if (entryAvailable == FALSE) {
11913 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11914 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11915 } else {
11916 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
 11917                        // HIGH_QUALITY is not supported, so set FAST, as the spec says the
 11918                        // underlying device implementation can be the same for both modes.
11919 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11920 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
 11921                        // FAST is not supported; rather than fall back to HIGH_QUALITY
 11922                        // (which would drop fps), choose OFF
11923 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11924 } else {
11925 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11926 }
11927 }
11928 }
11929 LOGD("Final cacMode is %d", cacMode);
11930 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11931 rc = BAD_VALUE;
11932 }
11933 } else {
11934 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11935 }
11936 }
11937
Thierry Strudel2896d122017-02-23 19:18:03 -080011938 char af_value[PROPERTY_VALUE_MAX];
11939 property_get("persist.camera.af.infinity", af_value, "0");
11940
Jason Lee84ae9972017-02-24 13:24:24 -080011941 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011942 if (atoi(af_value) == 0) {
11943 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011944 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011945 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11946 fwk_focusMode);
11947 if (NAME_NOT_FOUND != val) {
11948 uint8_t focusMode = (uint8_t)val;
11949 LOGD("set focus mode %d", focusMode);
11950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11951 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11952 rc = BAD_VALUE;
11953 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011954 }
11955 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011956 } else {
11957 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11958 LOGE("Focus forced to infinity %d", focusMode);
11959 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11960 rc = BAD_VALUE;
11961 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011962 }
11963
Jason Lee84ae9972017-02-24 13:24:24 -080011964 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11965 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011966 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11968 focalDistance)) {
11969 rc = BAD_VALUE;
11970 }
11971 }
11972
11973 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11974 uint8_t fwk_antibandingMode =
11975 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11976 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11977 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11978 if (NAME_NOT_FOUND != val) {
11979 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011980 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11981 if (m60HzZone) {
11982 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11983 } else {
11984 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11985 }
11986 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011987 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11988 hal_antibandingMode)) {
11989 rc = BAD_VALUE;
11990 }
11991 }
11992 }
11993
11994 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11995 int32_t expCompensation = frame_settings.find(
11996 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11997 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11998 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11999 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12000 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012001 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12003 expCompensation)) {
12004 rc = BAD_VALUE;
12005 }
12006 }
12007
12008 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12009 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12010 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12011 rc = BAD_VALUE;
12012 }
12013 }
12014 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12015 rc = setHalFpsRange(frame_settings, hal_metadata);
12016 if (rc != NO_ERROR) {
12017 LOGE("setHalFpsRange failed");
12018 }
12019 }
12020
12021 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12022 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12023 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12024 rc = BAD_VALUE;
12025 }
12026 }
12027
12028 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12029 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12030 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12031 fwk_effectMode);
12032 if (NAME_NOT_FOUND != val) {
12033 uint8_t effectMode = (uint8_t)val;
12034 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12035 rc = BAD_VALUE;
12036 }
12037 }
12038 }
12039
12040 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12041 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12042 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12043 colorCorrectMode)) {
12044 rc = BAD_VALUE;
12045 }
12046 }
12047
12048 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12049 cam_color_correct_gains_t colorCorrectGains;
12050 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12051 colorCorrectGains.gains[i] =
12052 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12053 }
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12055 colorCorrectGains)) {
12056 rc = BAD_VALUE;
12057 }
12058 }
12059
12060 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12061 cam_color_correct_matrix_t colorCorrectTransform;
12062 cam_rational_type_t transform_elem;
12063 size_t num = 0;
12064 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12065 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12066 transform_elem.numerator =
12067 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12068 transform_elem.denominator =
12069 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12070 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12071 num++;
12072 }
12073 }
12074 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12075 colorCorrectTransform)) {
12076 rc = BAD_VALUE;
12077 }
12078 }
12079
12080 cam_trigger_t aecTrigger;
12081 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12082 aecTrigger.trigger_id = -1;
12083 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12084 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12085 aecTrigger.trigger =
12086 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12087 aecTrigger.trigger_id =
12088 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12089 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12090 aecTrigger)) {
12091 rc = BAD_VALUE;
12092 }
12093 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12094 aecTrigger.trigger, aecTrigger.trigger_id);
12095 }
12096
12097 /*af_trigger must come with a trigger id*/
12098 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12099 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12100 cam_trigger_t af_trigger;
12101 af_trigger.trigger =
12102 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12103 af_trigger.trigger_id =
12104 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12106 rc = BAD_VALUE;
12107 }
12108 LOGD("AfTrigger: %d AfTriggerID: %d",
12109 af_trigger.trigger, af_trigger.trigger_id);
12110 }
12111
12112 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12113 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12115 rc = BAD_VALUE;
12116 }
12117 }
12118 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12119 cam_edge_application_t edge_application;
12120 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012121
Thierry Strudel3d639192016-09-09 11:52:26 -070012122 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12123 edge_application.sharpness = 0;
12124 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012125 edge_application.sharpness =
12126 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12127 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12128 int32_t sharpness =
12129 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12130 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12131 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12132 LOGD("Setting edge mode sharpness %d", sharpness);
12133 edge_application.sharpness = sharpness;
12134 }
12135 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012136 }
12137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12138 rc = BAD_VALUE;
12139 }
12140 }
12141
12142 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12143 int32_t respectFlashMode = 1;
12144 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12145 uint8_t fwk_aeMode =
12146 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012147 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12148 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12149 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012150 respectFlashMode = 0;
12151 LOGH("AE Mode controls flash, ignore android.flash.mode");
12152 }
12153 }
12154 if (respectFlashMode) {
12155 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12156 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12157 LOGH("flash mode after mapping %d", val);
12158 // To check: CAM_INTF_META_FLASH_MODE usage
12159 if (NAME_NOT_FOUND != val) {
12160 uint8_t flashMode = (uint8_t)val;
12161 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12162 rc = BAD_VALUE;
12163 }
12164 }
12165 }
12166 }
12167
12168 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12169 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12170 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12171 rc = BAD_VALUE;
12172 }
12173 }
12174
12175 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12176 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12178 flashFiringTime)) {
12179 rc = BAD_VALUE;
12180 }
12181 }
12182
12183 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12184 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12185 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12186 hotPixelMode)) {
12187 rc = BAD_VALUE;
12188 }
12189 }
12190
12191 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12192 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12194 lensAperture)) {
12195 rc = BAD_VALUE;
12196 }
12197 }
12198
12199 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12200 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12202 filterDensity)) {
12203 rc = BAD_VALUE;
12204 }
12205 }
12206
12207 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12208 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12210 focalLength)) {
12211 rc = BAD_VALUE;
12212 }
12213 }
12214
12215 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12216 uint8_t optStabMode =
12217 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12219 optStabMode)) {
12220 rc = BAD_VALUE;
12221 }
12222 }
12223
12224 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12225 uint8_t videoStabMode =
12226 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12227 LOGD("videoStabMode from APP = %d", videoStabMode);
12228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12229 videoStabMode)) {
12230 rc = BAD_VALUE;
12231 }
12232 }
12233
12234
12235 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12236 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12238 noiseRedMode)) {
12239 rc = BAD_VALUE;
12240 }
12241 }
12242
12243 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12244 float reprocessEffectiveExposureFactor =
12245 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12247 reprocessEffectiveExposureFactor)) {
12248 rc = BAD_VALUE;
12249 }
12250 }
12251
12252 cam_crop_region_t scalerCropRegion;
12253 bool scalerCropSet = false;
12254 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12255 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12256 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12257 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12258 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12259
12260 // Map coordinate system from active array to sensor output.
12261 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12262 scalerCropRegion.width, scalerCropRegion.height);
12263
12264 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12265 scalerCropRegion)) {
12266 rc = BAD_VALUE;
12267 }
12268 scalerCropSet = true;
12269 }
12270
12271 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12272 int64_t sensorExpTime =
12273 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12274 LOGD("setting sensorExpTime %lld", sensorExpTime);
12275 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12276 sensorExpTime)) {
12277 rc = BAD_VALUE;
12278 }
12279 }
12280
12281 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12282 int64_t sensorFrameDuration =
12283 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012284 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12285 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12286 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12287 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12289 sensorFrameDuration)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293
12294 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12295 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12296 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12297 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12298 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12299 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12300 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12302 sensorSensitivity)) {
12303 rc = BAD_VALUE;
12304 }
12305 }
12306
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012307#ifndef USE_HAL_3_3
12308 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12309 int32_t ispSensitivity =
12310 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12311 if (ispSensitivity <
12312 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12313 ispSensitivity =
12314 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12315 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12316 }
12317 if (ispSensitivity >
12318 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12319 ispSensitivity =
12320 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12321 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12322 }
12323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12324 ispSensitivity)) {
12325 rc = BAD_VALUE;
12326 }
12327 }
12328#endif
12329
Thierry Strudel3d639192016-09-09 11:52:26 -070012330 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12331 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12332 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12333 rc = BAD_VALUE;
12334 }
12335 }
12336
12337 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12338 uint8_t fwk_facedetectMode =
12339 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12340
12341 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12342 fwk_facedetectMode);
12343
12344 if (NAME_NOT_FOUND != val) {
12345 uint8_t facedetectMode = (uint8_t)val;
12346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12347 facedetectMode)) {
12348 rc = BAD_VALUE;
12349 }
12350 }
12351 }
12352
Thierry Strudel54dc9782017-02-15 12:12:10 -080012353 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012354 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012355 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12357 histogramMode)) {
12358 rc = BAD_VALUE;
12359 }
12360 }
12361
12362 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12363 uint8_t sharpnessMapMode =
12364 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12366 sharpnessMapMode)) {
12367 rc = BAD_VALUE;
12368 }
12369 }
12370
12371 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12372 uint8_t tonemapMode =
12373 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12375 rc = BAD_VALUE;
12376 }
12377 }
12378 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12379 /*All tonemap channels will have the same number of points*/
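    /* Each curve is a flat float array of interleaved (Pin, Pout) pairs,
     * e.g. {0.0, 0.0, 0.5, 0.6, 1.0, 1.0}, so the point count is count/2 and
     * the inner loops below copy two floats per point. */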
12380 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12381 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12382 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12383 cam_rgb_tonemap_curves tonemapCurves;
12384 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12385 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12386 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12387 tonemapCurves.tonemap_points_cnt,
12388 CAM_MAX_TONEMAP_CURVE_SIZE);
12389 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12390 }
12391
12392 /* ch0 = G*/
12393 size_t point = 0;
12394 cam_tonemap_curve_t tonemapCurveGreen;
12395 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12396 for (size_t j = 0; j < 2; j++) {
12397 tonemapCurveGreen.tonemap_points[i][j] =
12398 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12399 point++;
12400 }
12401 }
12402 tonemapCurves.curves[0] = tonemapCurveGreen;
12403
12404 /* ch 1 = B */
12405 point = 0;
12406 cam_tonemap_curve_t tonemapCurveBlue;
12407 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12408 for (size_t j = 0; j < 2; j++) {
12409 tonemapCurveBlue.tonemap_points[i][j] =
12410 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12411 point++;
12412 }
12413 }
12414 tonemapCurves.curves[1] = tonemapCurveBlue;
12415
12416 /* ch 2 = R */
12417 point = 0;
12418 cam_tonemap_curve_t tonemapCurveRed;
12419 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12420 for (size_t j = 0; j < 2; j++) {
12421 tonemapCurveRed.tonemap_points[i][j] =
12422 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12423 point++;
12424 }
12425 }
12426 tonemapCurves.curves[2] = tonemapCurveRed;
12427
12428 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12429 tonemapCurves)) {
12430 rc = BAD_VALUE;
12431 }
12432 }
12433
12434 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12435 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12436 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12437 captureIntent)) {
12438 rc = BAD_VALUE;
12439 }
12440 }
12441
12442 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12443 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12444 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12445 blackLevelLock)) {
12446 rc = BAD_VALUE;
12447 }
12448 }
12449
12450 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12451 uint8_t lensShadingMapMode =
12452 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12453 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12454 lensShadingMapMode)) {
12455 rc = BAD_VALUE;
12456 }
12457 }
12458
12459 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12460 cam_area_t roi;
12461 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012462 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012463
12464 // Map coordinate system from active array to sensor output.
12465 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12466 roi.rect.height);
12467
12468 if (scalerCropSet) {
12469 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12470 }
12471 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12472 rc = BAD_VALUE;
12473 }
12474 }
12475
12476 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12477 cam_area_t roi;
12478 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012479 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012480
12481 // Map coordinate system from active array to sensor output.
12482 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12483 roi.rect.height);
12484
12485 if (scalerCropSet) {
12486 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12487 }
12488 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12489 rc = BAD_VALUE;
12490 }
12491 }
12492
12493 // CDS for non-HFR non-video mode
12494 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12495 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12496 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12497 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12498 LOGE("Invalid CDS mode %d!", *fwk_cds);
12499 } else {
12500 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12501 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12502 rc = BAD_VALUE;
12503 }
12504 }
12505 }
12506
Thierry Strudel04e026f2016-10-10 11:27:36 -070012507 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012508 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012509 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012510 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12511 }
12512 if (m_bVideoHdrEnabled)
12513 vhdr = CAM_VIDEO_HDR_MODE_ON;
12514
Thierry Strudel54dc9782017-02-15 12:12:10 -080012515 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12516
12517 if(vhdr != curr_hdr_state)
12518 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12519
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012520 rc = setVideoHdrMode(mParameters, vhdr);
12521 if (rc != NO_ERROR) {
12522 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012523 }
12524
12525 //IR
12526 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12527 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12528 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012529 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12530 uint8_t isIRon = 0;
12531
 12532        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012533 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12534 LOGE("Invalid IR mode %d!", fwk_ir);
12535 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012536 if(isIRon != curr_ir_state )
12537 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12538
Thierry Strudel04e026f2016-10-10 11:27:36 -070012539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12540 CAM_INTF_META_IR_MODE, fwk_ir)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544 }
12545
Thierry Strudel54dc9782017-02-15 12:12:10 -080012546 //Binning Correction Mode
12547 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12548 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12549 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12550 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12551 || (0 > fwk_binning_correction)) {
12552 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12553 } else {
12554 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12555 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12556 rc = BAD_VALUE;
12557 }
12558 }
12559 }
12560
Thierry Strudel269c81a2016-10-12 12:13:59 -070012561 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12562 float aec_speed;
12563 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12564 LOGD("AEC Speed :%f", aec_speed);
12565 if ( aec_speed < 0 ) {
 12566            LOGE("Invalid AEC convergence speed %f!", aec_speed);
12567 } else {
12568 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12569 aec_speed)) {
12570 rc = BAD_VALUE;
12571 }
12572 }
12573 }
12574
12575 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12576 float awb_speed;
12577 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12578 LOGD("AWB Speed :%f", awb_speed);
12579 if ( awb_speed < 0 ) {
 12580            LOGE("Invalid AWB convergence speed %f!", awb_speed);
12581 } else {
12582 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12583 awb_speed)) {
12584 rc = BAD_VALUE;
12585 }
12586 }
12587 }
12588
Thierry Strudel3d639192016-09-09 11:52:26 -070012589 // TNR
12590 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12591 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12592 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012593 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012594 cam_denoise_param_t tnr;
12595 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12596 tnr.process_plates =
12597 (cam_denoise_process_type_t)frame_settings.find(
12598 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12599 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012600
12601 if(b_TnrRequested != curr_tnr_state)
12602 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12603
Thierry Strudel3d639192016-09-09 11:52:26 -070012604 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12605 rc = BAD_VALUE;
12606 }
12607 }
12608
Thierry Strudel54dc9782017-02-15 12:12:10 -080012609 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012610 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012611 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012612 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12613 *exposure_metering_mode)) {
12614 rc = BAD_VALUE;
12615 }
12616 }
12617
Thierry Strudel3d639192016-09-09 11:52:26 -070012618 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12619 int32_t fwk_testPatternMode =
12620 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12621 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12622 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12623
12624 if (NAME_NOT_FOUND != testPatternMode) {
12625 cam_test_pattern_data_t testPatternData;
12626 memset(&testPatternData, 0, sizeof(testPatternData));
12627 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12628 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12629 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12630 int32_t *fwk_testPatternData =
12631 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12632 testPatternData.r = fwk_testPatternData[0];
12633 testPatternData.b = fwk_testPatternData[3];
12634 switch (gCamCapability[mCameraId]->color_arrangement) {
12635 case CAM_FILTER_ARRANGEMENT_RGGB:
12636 case CAM_FILTER_ARRANGEMENT_GRBG:
12637 testPatternData.gr = fwk_testPatternData[1];
12638 testPatternData.gb = fwk_testPatternData[2];
12639 break;
12640 case CAM_FILTER_ARRANGEMENT_GBRG:
12641 case CAM_FILTER_ARRANGEMENT_BGGR:
12642 testPatternData.gr = fwk_testPatternData[2];
12643 testPatternData.gb = fwk_testPatternData[1];
12644 break;
12645 default:
12646 LOGE("color arrangement %d is not supported",
12647 gCamCapability[mCameraId]->color_arrangement);
12648 break;
12649 }
12650 }
12651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12652 testPatternData)) {
12653 rc = BAD_VALUE;
12654 }
12655 } else {
12656 LOGE("Invalid framework sensor test pattern mode %d",
12657 fwk_testPatternMode);
12658 }
12659 }
12660
12661 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12662 size_t count = 0;
12663 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12664 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12665 gps_coords.data.d, gps_coords.count, count);
12666 if (gps_coords.count != count) {
12667 rc = BAD_VALUE;
12668 }
12669 }
12670
12671 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12672 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12673 size_t count = 0;
12674 const char *gps_methods_src = (const char *)
12675 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12676 memset(gps_methods, '\0', sizeof(gps_methods));
12677 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12678 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12679 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12680 if (GPS_PROCESSING_METHOD_SIZE != count) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
12685 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12686 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12688 gps_timestamp)) {
12689 rc = BAD_VALUE;
12690 }
12691 }
12692
12693 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12694 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12695 cam_rotation_info_t rotation_info;
12696 if (orientation == 0) {
12697 rotation_info.rotation = ROTATE_0;
12698 } else if (orientation == 90) {
12699 rotation_info.rotation = ROTATE_90;
12700 } else if (orientation == 180) {
12701 rotation_info.rotation = ROTATE_180;
12702 } else if (orientation == 270) {
12703 rotation_info.rotation = ROTATE_270;
12704 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012705 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012706 rotation_info.streamId = snapshotStreamId;
12707 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12708 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
12713 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12714 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12715 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12716 rc = BAD_VALUE;
12717 }
12718 }
12719
12720 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12721 uint32_t thumb_quality = (uint32_t)
12722 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12723 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12724 thumb_quality)) {
12725 rc = BAD_VALUE;
12726 }
12727 }
12728
12729 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12730 cam_dimension_t dim;
12731 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12732 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12734 rc = BAD_VALUE;
12735 }
12736 }
12737
12738 // Internal metadata
12739 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12740 size_t count = 0;
12741 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12742 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12743 privatedata.data.i32, privatedata.count, count);
12744 if (privatedata.count != count) {
12745 rc = BAD_VALUE;
12746 }
12747 }
12748
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012749 // ISO/Exposure Priority
12750 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12751 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12752 cam_priority_mode_t mode =
12753 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12754 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12755 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12756 use_iso_exp_pty.previewOnly = FALSE;
12757 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12758 use_iso_exp_pty.value = *ptr;
12759
12760 if(CAM_ISO_PRIORITY == mode) {
12761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12762 use_iso_exp_pty)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766 else {
12767 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12768 use_iso_exp_pty)) {
12769 rc = BAD_VALUE;
12770 }
12771 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012772
12773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12774 rc = BAD_VALUE;
12775 }
12776 }
12777 } else {
12778 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12779 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012780 }
12781 }
12782
12783 // Saturation
12784 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12785 int32_t* use_saturation =
12786 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12787 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12788 rc = BAD_VALUE;
12789 }
12790 }
12791
Thierry Strudel3d639192016-09-09 11:52:26 -070012792 // EV step
12793 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12794 gCamCapability[mCameraId]->exp_compensation_step)) {
12795 rc = BAD_VALUE;
12796 }
12797
12798 // CDS info
12799 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12800 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12801 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12802
12803 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12804 CAM_INTF_META_CDS_DATA, *cdsData)) {
12805 rc = BAD_VALUE;
12806 }
12807 }
12808
Shuzhen Wang19463d72016-03-08 11:09:52 -080012809 // Hybrid AE
12810 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12811 uint8_t *hybrid_ae = (uint8_t *)
12812 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12813
12814 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12815 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12816 rc = BAD_VALUE;
12817 }
12818 }
12819
Shuzhen Wang14415f52016-11-16 18:26:18 -080012820 // Histogram
12821 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12822 uint8_t histogramMode =
12823 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12825 histogramMode)) {
12826 rc = BAD_VALUE;
12827 }
12828 }
12829
12830 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12831 int32_t histogramBins =
12832 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12834 histogramBins)) {
12835 rc = BAD_VALUE;
12836 }
12837 }
12838
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012839 // Tracking AF
12840 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12841 uint8_t trackingAfTrigger =
12842 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12844 trackingAfTrigger)) {
12845 rc = BAD_VALUE;
12846 }
12847 }
12848
Thierry Strudel3d639192016-09-09 11:52:26 -070012849 return rc;
12850}
12851
12852/*===========================================================================
12853 * FUNCTION : captureResultCb
12854 *
12855 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12856 *
12857 * PARAMETERS :
12858 * @frame : frame information from mm-camera-interface
12859 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12860 * @userdata: userdata
12861 *
12862 * RETURN : NONE
12863 *==========================================================================*/
12864void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12865 camera3_stream_buffer_t *buffer,
12866 uint32_t frame_number, bool isInputBuffer, void *userdata)
12867{
12868 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12869 if (hw == NULL) {
12870 LOGE("Invalid hw %p", hw);
12871 return;
12872 }
12873
12874 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12875 return;
12876}
12877
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012878/*===========================================================================
12879 * FUNCTION : setBufferErrorStatus
12880 *
12881 * DESCRIPTION: Callback handler for channels to report any buffer errors
12882 *
12883 * PARAMETERS :
12884 * @ch : Channel on which buffer error is reported from
12885 * @frame_number : frame number on which buffer error is reported on
12886 * @buffer_status : buffer error status
12887 * @userdata: userdata
12888 *
12889 * RETURN : NONE
12890 *==========================================================================*/
12891void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12892 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12893{
12894 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12895 if (hw == NULL) {
12896 LOGE("Invalid hw %p", hw);
12897 return;
12898 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012899
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012900 hw->setBufferErrorStatus(ch, frame_number, err);
12901 return;
12902}
12903
12904void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12905 uint32_t frameNumber, camera3_buffer_status_t err)
12906{
12907 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12908 pthread_mutex_lock(&mMutex);
12909
12910 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12911 if (req.frame_number != frameNumber)
12912 continue;
12913 for (auto& k : req.mPendingBufferList) {
12914 if(k.stream->priv == ch) {
12915 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12916 }
12917 }
12918 }
12919
12920 pthread_mutex_unlock(&mMutex);
12921 return;
12922}
Thierry Strudel3d639192016-09-09 11:52:26 -070012923/*===========================================================================
12924 * FUNCTION : initialize
12925 *
12926 * DESCRIPTION: Pass framework callback pointers to HAL
12927 *
12928 * PARAMETERS :
12929 *
12930 *
12931 * RETURN : Success : 0
12932 * Failure: -ENODEV
12933 *==========================================================================*/
12934
12935int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12936 const camera3_callback_ops_t *callback_ops)
12937{
12938 LOGD("E");
12939 QCamera3HardwareInterface *hw =
12940 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12941 if (!hw) {
12942 LOGE("NULL camera device");
12943 return -ENODEV;
12944 }
12945
12946 int rc = hw->initialize(callback_ops);
12947 LOGD("X");
12948 return rc;
12949}
12950
12951/*===========================================================================
12952 * FUNCTION : configure_streams
12953 *
 12954 * DESCRIPTION: Configure a new set of input/output streams for the camera session
12955 *
12956 * PARAMETERS :
12957 *
12958 *
12959 * RETURN : Success: 0
12960 * Failure: -EINVAL (if stream configuration is invalid)
12961 * -ENODEV (fatal error)
12962 *==========================================================================*/
12963
12964int QCamera3HardwareInterface::configure_streams(
12965 const struct camera3_device *device,
12966 camera3_stream_configuration_t *stream_list)
12967{
12968 LOGD("E");
12969 QCamera3HardwareInterface *hw =
12970 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12971 if (!hw) {
12972 LOGE("NULL camera device");
12973 return -ENODEV;
12974 }
12975 int rc = hw->configureStreams(stream_list);
12976 LOGD("X");
12977 return rc;
12978}
12979
12980/*===========================================================================
12981 * FUNCTION : construct_default_request_settings
12982 *
12983 * DESCRIPTION: Configure a settings buffer to meet the required use case
12984 *
12985 * PARAMETERS :
12986 *
12987 *
12988 * RETURN : Success: Return valid metadata
12989 * Failure: Return NULL
12990 *==========================================================================*/
12991const camera_metadata_t* QCamera3HardwareInterface::
12992 construct_default_request_settings(const struct camera3_device *device,
12993 int type)
12994{
12995
12996 LOGD("E");
12997 camera_metadata_t* fwk_metadata = NULL;
12998 QCamera3HardwareInterface *hw =
12999 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13000 if (!hw) {
13001 LOGE("NULL camera device");
13002 return NULL;
13003 }
13004
13005 fwk_metadata = hw->translateCapabilityToMetadata(type);
13006
13007 LOGD("X");
13008 return fwk_metadata;
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : process_capture_request
13013 *
 13014 * DESCRIPTION: Hand a capture request from the framework to the HAL for processing
13015 *
13016 * PARAMETERS :
13017 *
13018 *
13019 * RETURN :
13020 *==========================================================================*/
13021int QCamera3HardwareInterface::process_capture_request(
13022 const struct camera3_device *device,
13023 camera3_capture_request_t *request)
13024{
13025 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013026 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013027 QCamera3HardwareInterface *hw =
13028 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13029 if (!hw) {
13030 LOGE("NULL camera device");
13031 return -EINVAL;
13032 }
13033
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013034 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013035 LOGD("X");
13036 return rc;
13037}
13038
13039/*===========================================================================
13040 * FUNCTION : dump
13041 *
 13042 * DESCRIPTION: Dump HAL state and debug information to the supplied file descriptor
13043 *
13044 * PARAMETERS :
13045 *
13046 *
13047 * RETURN :
13048 *==========================================================================*/
13049
13050void QCamera3HardwareInterface::dump(
13051 const struct camera3_device *device, int fd)
13052{
13053 /* Log level property is read when "adb shell dumpsys media.camera" is
13054 called so that the log level can be controlled without restarting
13055 the media server */
13056 getLogLevel();
13057
13058 LOGD("E");
13059 QCamera3HardwareInterface *hw =
13060 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13061 if (!hw) {
13062 LOGE("NULL camera device");
13063 return;
13064 }
13065
13066 hw->dump(fd);
13067 LOGD("X");
13068 return;
13069}
13070
13071/*===========================================================================
13072 * FUNCTION : flush
13073 *
 13074 * DESCRIPTION: Flush all in-flight requests as quickly as possible and return their buffers
13075 *
13076 * PARAMETERS :
13077 *
13078 *
13079 * RETURN :
13080 *==========================================================================*/
13081
13082int QCamera3HardwareInterface::flush(
13083 const struct camera3_device *device)
13084{
13085 int rc;
13086 LOGD("E");
13087 QCamera3HardwareInterface *hw =
13088 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13089 if (!hw) {
13090 LOGE("NULL camera device");
13091 return -EINVAL;
13092 }
13093
13094 pthread_mutex_lock(&hw->mMutex);
13095 // Validate current state
13096 switch (hw->mState) {
13097 case STARTED:
13098 /* valid state */
13099 break;
13100
13101 case ERROR:
13102 pthread_mutex_unlock(&hw->mMutex);
13103 hw->handleCameraDeviceError();
13104 return -ENODEV;
13105
13106 default:
13107 LOGI("Flush returned during state %d", hw->mState);
13108 pthread_mutex_unlock(&hw->mMutex);
13109 return 0;
13110 }
13111 pthread_mutex_unlock(&hw->mMutex);
13112
13113 rc = hw->flush(true /* restart channels */ );
13114 LOGD("X");
13115 return rc;
13116}
13117
13118/*===========================================================================
13119 * FUNCTION : close_camera_device
13120 *
 13121 * DESCRIPTION: Close the camera device and release the HAL instance
13122 *
13123 * PARAMETERS :
13124 *
13125 *
13126 * RETURN :
13127 *==========================================================================*/
13128int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13129{
13130 int ret = NO_ERROR;
13131 QCamera3HardwareInterface *hw =
13132 reinterpret_cast<QCamera3HardwareInterface *>(
13133 reinterpret_cast<camera3_device_t *>(device)->priv);
13134 if (!hw) {
13135 LOGE("NULL camera device");
13136 return BAD_VALUE;
13137 }
13138
13139 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13140 delete hw;
13141 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013142 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013143 return ret;
13144}
13145
13146/*===========================================================================
13147 * FUNCTION : getWaveletDenoiseProcessPlate
13148 *
13149 * DESCRIPTION: query wavelet denoise process plate
13150 *
13151 * PARAMETERS : None
13152 *
13153 * RETURN     : WNR process plate value
13154 *==========================================================================*/
13155cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13156{
13157 char prop[PROPERTY_VALUE_MAX];
13158 memset(prop, 0, sizeof(prop));
13159 property_get("persist.denoise.process.plates", prop, "0");
13160 int processPlate = atoi(prop);
13161 switch(processPlate) {
13162 case 0:
13163 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13164 case 1:
13165 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13166 case 2:
13167 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13168 case 3:
13169 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13170 default:
13171 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13172 }
13173}
13174
13175
13176/*===========================================================================
13177 * FUNCTION : getTemporalDenoiseProcessPlate
13178 *
13179 * DESCRIPTION: query temporal denoise process plate
13180 *
13181 * PARAMETERS : None
13182 *
13183 * RETURN     : TNR process plate value
13184 *==========================================================================*/
13185cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13186{
13187 char prop[PROPERTY_VALUE_MAX];
13188 memset(prop, 0, sizeof(prop));
13189 property_get("persist.tnr.process.plates", prop, "0");
13190 int processPlate = atoi(prop);
13191 switch(processPlate) {
13192 case 0:
13193 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13194 case 1:
13195 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13196 case 2:
13197 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13198 case 3:
13199 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13200 default:
13201 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13202 }
13203}
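
/*
 * Note on the two plate selectors above: both the WNR (persist.denoise.process.plates)
 * and TNR (persist.tnr.process.plates) properties use the same 0..3 mapping shown in the
 * switch statements (0: YCbCr plane, 1: CbCr only, 2: streamlined YCbCr, 3: streamlined
 * CbCr), falling back to streamlined YCbCr for anything else. Illustrative only, assuming
 * the usual Android property workflow, the plate can be overridden at runtime with e.g.:
 *
 *   adb shell setprop persist.denoise.process.plates 2   // WNR: streamlined YCbCr
 *   adb shell setprop persist.tnr.process.plates 1       // TNR: CbCr only
 */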
13204
13205
13206/*===========================================================================
13207 * FUNCTION : extractSceneMode
13208 *
13209 * DESCRIPTION: Extract scene mode from frameworks set metadata
13210 *
13211 * PARAMETERS :
13212 * @frame_settings: CameraMetadata reference
13213 *   @metaMode: ANDROID_CONTROL_MODE value set by the framework
13214 *   @hal_metadata: hal metadata structure
13215 *
13216 * RETURN     : int32_t type of status, NO_ERROR on success
13217 *==========================================================================*/
13218int32_t QCamera3HardwareInterface::extractSceneMode(
13219 const CameraMetadata &frame_settings, uint8_t metaMode,
13220 metadata_buffer_t *hal_metadata)
13221{
13222 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013223 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13224
13225 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13226 LOGD("Ignoring control mode OFF_KEEP_STATE");
13227 return NO_ERROR;
13228 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013229
13230 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13231 camera_metadata_ro_entry entry =
13232 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13233 if (0 == entry.count)
13234 return rc;
13235
13236 uint8_t fwk_sceneMode = entry.data.u8[0];
13237
13238 int val = lookupHalName(SCENE_MODES_MAP,
13239 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13240 fwk_sceneMode);
13241 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013242 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013243 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013244 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013245 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013246
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013247 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13248 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13249 }
13250
13251 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13252         if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013253 cam_hdr_param_t hdr_params;
13254 hdr_params.hdr_enable = 1;
13255 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13256 hdr_params.hdr_need_1x = false;
13257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13258 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13259 rc = BAD_VALUE;
13260 }
13261 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013262
Thierry Strudel3d639192016-09-09 11:52:26 -070013263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13264 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13265 rc = BAD_VALUE;
13266 }
13267 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013268
13269 if (mForceHdrSnapshot) {
13270 cam_hdr_param_t hdr_params;
13271 hdr_params.hdr_enable = 1;
13272 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13273 hdr_params.hdr_need_1x = false;
13274 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13275 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13276 rc = BAD_VALUE;
13277 }
13278 }
13279
Thierry Strudel3d639192016-09-09 11:52:26 -070013280 return rc;
13281}
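
/*
 * Summary of the HDR paths in extractSceneMode() above (descriptive comment only):
 *  - if the framework scene mode maps to CAM_SCENE_MODE_HDR (or sensor HDR is already on),
 *    sensor HDR is toggled through setSensorHDR();
 *  - if sensor HDR is not in use, multi-frame HDR bracketing is requested via
 *    CAM_INTF_PARM_HAL_BRACKETING_HDR and the scene is forwarded as the bestshot mode;
 *  - mForceHdrSnapshot unconditionally adds the same multi-frame bracketing parameters.
 */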
13282
13283/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013284 * FUNCTION : setVideoHdrMode
13285 *
13286 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13287 *
13288 * PARAMETERS :
13289 * @hal_metadata: hal metadata structure
13290 *   @vhdr: video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13291 *
13292 * RETURN     : int32_t type of status, NO_ERROR on success
13293 *==========================================================================*/
13294int32_t QCamera3HardwareInterface::setVideoHdrMode(
13295 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13296{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013297 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13298 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13299 }
13300
13301 LOGE("Invalid Video HDR mode %d!", vhdr);
13302 return BAD_VALUE;
13303}
13304
13305/*===========================================================================
13306 * FUNCTION : setSensorHDR
13307 *
13308 * DESCRIPTION: Enable/disable sensor HDR.
13309 *
13310 * PARAMETERS :
13311 * @hal_metadata: hal metadata structure
13312 *   @enable: whether to enable or disable sensor HDR
13313 *   @isVideoHdrEnable: true when called for video HDR; m_bSensorHDREnabled is then left unchanged
13314 * RETURN     : int32_t type of status, NO_ERROR on success
13315 *==========================================================================*/
13316int32_t QCamera3HardwareInterface::setSensorHDR(
13317 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13318{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013319 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013320 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13321
13322 if (enable) {
13323 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13324 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13325 #ifdef _LE_CAMERA_
13326 //Default to staggered HDR for IOT
13327 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13328 #else
13329 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13330 #endif
13331 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13332 }
13333
13334 bool isSupported = false;
13335 switch (sensor_hdr) {
13336 case CAM_SENSOR_HDR_IN_SENSOR:
13337 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13338 CAM_QCOM_FEATURE_SENSOR_HDR) {
13339 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013340 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013341 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013342 break;
13343 case CAM_SENSOR_HDR_ZIGZAG:
13344 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13345 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13346 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013347 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013348 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013349 break;
13350 case CAM_SENSOR_HDR_STAGGERED:
13351 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13352 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13353 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013354 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013355 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013356 break;
13357 case CAM_SENSOR_HDR_OFF:
13358 isSupported = true;
13359 LOGD("Turning off sensor HDR");
13360 break;
13361 default:
13362 LOGE("HDR mode %d not supported", sensor_hdr);
13363 rc = BAD_VALUE;
13364 break;
13365 }
13366
13367 if(isSupported) {
13368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13369 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13370 rc = BAD_VALUE;
13371 } else {
13372 if(!isVideoHdrEnable)
13373 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013374 }
13375 }
13376 return rc;
13377}
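
/*
 * Illustrative note (assumption, based on the "3" -> staggered default used for _LE_CAMERA_
 * above): persist.camera.sensor.hdr selects a cam_sensor_hdr_type_t value, e.g.
 *
 *   adb shell setprop persist.camera.sensor.hdr 1   // request in-sensor HDR
 *
 * The requested type is still gated by the qcom_supported_feature_mask checks in the
 * switch above before it is sent down as CAM_INTF_PARM_SENSOR_HDR.
 */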
13378
13379/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013380 * FUNCTION : needRotationReprocess
13381 *
13382 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13383 *
13384 * PARAMETERS : none
13385 *
13386 * RETURN : true: needed
13387 * false: no need
13388 *==========================================================================*/
13389bool QCamera3HardwareInterface::needRotationReprocess()
13390{
13391 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13392 // current rotation is not zero, and pp has the capability to process rotation
13393         LOGH("need to do reprocess for rotation");
13394 return true;
13395 }
13396
13397 return false;
13398}
13399
13400/*===========================================================================
13401 * FUNCTION : needReprocess
13402 *
13403 * DESCRIPTION: if reprocess is needed
13404 *
13405 * PARAMETERS : none
13406 *
13407 * RETURN : true: needed
13408 * false: no need
13409 *==========================================================================*/
13410bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13411{
13412 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13413 // TODO: add for ZSL HDR later
13414 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13415 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13416             LOGH("need to do reprocess for ZSL WNR or min PP reprocess");
13417 return true;
13418 } else {
13419 LOGH("already post processed frame");
13420 return false;
13421 }
13422 }
13423 return needRotationReprocess();
13424}
13425
13426/*===========================================================================
13427 * FUNCTION : needJpegExifRotation
13428 *
13429 * DESCRIPTION: whether rotation needs to be applied via JPEG EXIF
13430 *
13431 * PARAMETERS : none
13432 *
13433 * RETURN : true: needed
13434 * false: no need
13435 *==========================================================================*/
13436bool QCamera3HardwareInterface::needJpegExifRotation()
13437{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013438 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013439 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13440         LOGD("Need to use Jpeg EXIF Rotation");
13441 return true;
13442 }
13443 return false;
13444}
13445
13446/*===========================================================================
13447 * FUNCTION : addOfflineReprocChannel
13448 *
13449 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13450 * coming from input channel
13451 *
13452 * PARAMETERS :
13453 * @config : reprocess configuration
13454 * @inputChHandle : pointer to the input (source) channel
13455 *
13456 *
13457 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13458 *==========================================================================*/
13459QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13460 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13461{
13462 int32_t rc = NO_ERROR;
13463 QCamera3ReprocessChannel *pChannel = NULL;
13464
13465 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013466 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13467 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013468 if (NULL == pChannel) {
13469 LOGE("no mem for reprocess channel");
13470 return NULL;
13471 }
13472
13473 rc = pChannel->initialize(IS_TYPE_NONE);
13474 if (rc != NO_ERROR) {
13475 LOGE("init reprocess channel failed, ret = %d", rc);
13476 delete pChannel;
13477 return NULL;
13478 }
13479
13480 // pp feature config
13481 cam_pp_feature_config_t pp_config;
13482 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13483
13484 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13485 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13486 & CAM_QCOM_FEATURE_DSDN) {
13487         // Use CPP CDS in case h/w supports it.
13488 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13489 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13490 }
13491 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13492 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13493 }
13494
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013495 if (config.hdr_param.hdr_enable) {
13496 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13497 pp_config.hdr_param = config.hdr_param;
13498 }
13499
13500 if (mForceHdrSnapshot) {
13501 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13502 pp_config.hdr_param.hdr_enable = 1;
13503 pp_config.hdr_param.hdr_need_1x = 0;
13504 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13505 }
13506
Thierry Strudel3d639192016-09-09 11:52:26 -070013507 rc = pChannel->addReprocStreamsFromSource(pp_config,
13508 config,
13509 IS_TYPE_NONE,
13510 mMetadataChannel);
13511
13512 if (rc != NO_ERROR) {
13513 delete pChannel;
13514 return NULL;
13515 }
13516 return pChannel;
13517}
13518
13519/*===========================================================================
13520 * FUNCTION : getMobicatMask
13521 *
13522 * DESCRIPTION: returns mobicat mask
13523 *
13524 * PARAMETERS : none
13525 *
13526 * RETURN : mobicat mask
13527 *
13528 *==========================================================================*/
13529uint8_t QCamera3HardwareInterface::getMobicatMask()
13530{
13531 return m_MobicatMask;
13532}
13533
13534/*===========================================================================
13535 * FUNCTION : setMobicat
13536 *
13537 * DESCRIPTION: set Mobicat on/off.
13538 *
13539 * PARAMETERS :
13540 * @params : none
13541 *
13542 * RETURN : int32_t type of status
13543 * NO_ERROR -- success
13544 *              non-zero failure code
13545 *==========================================================================*/
13546int32_t QCamera3HardwareInterface::setMobicat()
13547{
13548 char value [PROPERTY_VALUE_MAX];
13549 property_get("persist.camera.mobicat", value, "0");
13550 int32_t ret = NO_ERROR;
13551 uint8_t enableMobi = (uint8_t)atoi(value);
13552
13553 if (enableMobi) {
13554 tune_cmd_t tune_cmd;
13555 tune_cmd.type = SET_RELOAD_CHROMATIX;
13556 tune_cmd.module = MODULE_ALL;
13557 tune_cmd.value = TRUE;
13558 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13559 CAM_INTF_PARM_SET_VFE_COMMAND,
13560 tune_cmd);
13561
13562 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13563 CAM_INTF_PARM_SET_PP_COMMAND,
13564 tune_cmd);
13565 }
13566 m_MobicatMask = enableMobi;
13567
13568 return ret;
13569}
13570
13571/*===========================================================================
13572* FUNCTION : getLogLevel
13573*
13574* DESCRIPTION: Reads the log level property into a variable
13575*
13576* PARAMETERS :
13577* None
13578*
13579* RETURN :
13580* None
13581*==========================================================================*/
13582void QCamera3HardwareInterface::getLogLevel()
13583{
13584 char prop[PROPERTY_VALUE_MAX];
13585 uint32_t globalLogLevel = 0;
13586
13587 property_get("persist.camera.hal.debug", prop, "0");
13588 int val = atoi(prop);
13589 if (0 <= val) {
13590 gCamHal3LogLevel = (uint32_t)val;
13591 }
13592
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013593 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013594 gKpiDebugLevel = atoi(prop);
13595
13596 property_get("persist.camera.global.debug", prop, "0");
13597 val = atoi(prop);
13598 if (0 <= val) {
13599 globalLogLevel = (uint32_t)val;
13600 }
13601
13602 /* Highest log level among hal.logs and global.logs is selected */
13603 if (gCamHal3LogLevel < globalLogLevel)
13604 gCamHal3LogLevel = globalLogLevel;
13605
13606 return;
13607}
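
/*
 * Example (assuming the standard Android property workflow): HAL logging can be raised at
 * runtime without restarting the media server, e.g.
 *
 *   adb shell setprop persist.camera.hal.debug 4
 *   adb shell setprop persist.camera.global.debug 2
 *   adb shell dumpsys media.camera      // invokes dump(), which calls getLogLevel()
 *
 * The effective level is the higher of the hal.debug and global.debug values.
 */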
13608
13609/*===========================================================================
13610 * FUNCTION : validateStreamRotations
13611 *
13612 * DESCRIPTION: Check if the rotations requested are supported
13613 *
13614 * PARAMETERS :
13615 * @stream_list : streams to be configured
13616 *
13617 * RETURN : NO_ERROR on success
13618 * -EINVAL on failure
13619 *
13620 *==========================================================================*/
13621int QCamera3HardwareInterface::validateStreamRotations(
13622 camera3_stream_configuration_t *streamList)
13623{
13624 int rc = NO_ERROR;
13625
13626 /*
13627 * Loop through all streams requested in configuration
13628 * Check if unsupported rotations have been requested on any of them
13629 */
13630 for (size_t j = 0; j < streamList->num_streams; j++){
13631 camera3_stream_t *newStream = streamList->streams[j];
13632
13633 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13634 bool isImplDef = (newStream->format ==
13635 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13636 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13637 isImplDef);
13638
13639 if (isRotated && (!isImplDef || isZsl)) {
13640 LOGE("Error: Unsupported rotation of %d requested for stream"
13641                 " type:%d and stream format:%d",
13642 newStream->rotation, newStream->stream_type,
13643 newStream->format);
13644 rc = -EINVAL;
13645 break;
13646 }
13647 }
13648
13649 return rc;
13650}
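
/*
 * In short: a non-zero CAMERA3_STREAM_ROTATION_* is only accepted on implementation-defined
 * streams that are not bidirectional (ZSL); e.g. requesting rotation on a BLOB (JPEG) stream
 * or on a ZSL stream makes the whole configuration fail with -EINVAL.
 */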
13651
13652/*===========================================================================
13653* FUNCTION : getFlashInfo
13654*
13655* DESCRIPTION: Retrieve information about whether the device has a flash.
13656*
13657* PARAMETERS :
13658* @cameraId : Camera id to query
13659* @hasFlash : Boolean indicating whether there is a flash device
13660* associated with given camera
13661* @flashNode : If a flash device exists, this will be its device node.
13662*
13663* RETURN :
13664* None
13665*==========================================================================*/
13666void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13667 bool& hasFlash,
13668 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13669{
13670 cam_capability_t* camCapability = gCamCapability[cameraId];
13671 if (NULL == camCapability) {
13672 hasFlash = false;
13673 flashNode[0] = '\0';
13674 } else {
13675 hasFlash = camCapability->flash_available;
13676 strlcpy(flashNode,
13677 (char*)camCapability->flash_dev_name,
13678 QCAMERA_MAX_FILEPATH_LENGTH);
13679 }
13680}
13681
13682/*===========================================================================
13683* FUNCTION : getEepromVersionInfo
13684*
13685* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13686*
13687* PARAMETERS : None
13688*
13689* RETURN : string describing EEPROM version
13690* "\0" if no such info available
13691*==========================================================================*/
13692const char *QCamera3HardwareInterface::getEepromVersionInfo()
13693{
13694 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13695}
13696
13697/*===========================================================================
13698* FUNCTION : getLdafCalib
13699*
13700* DESCRIPTION: Retrieve Laser AF calibration data
13701*
13702* PARAMETERS : None
13703*
13704* RETURN : Two uint32_t describing laser AF calibration data
13705* NULL if none is available.
13706*==========================================================================*/
13707const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13708{
13709 if (mLdafCalibExist) {
13710 return &mLdafCalib[0];
13711 } else {
13712 return NULL;
13713 }
13714}
13715
13716/*===========================================================================
13717 * FUNCTION : dynamicUpdateMetaStreamInfo
13718 *
13719 * DESCRIPTION: This function:
13720 * (1) stops all the channels
13721 * (2) returns error on pending requests and buffers
13722 * (3) sends metastream_info in setparams
13723 * (4) starts all channels
13724 * This is useful when sensor has to be restarted to apply any
13725 * settings such as frame rate from a different sensor mode
13726 *
13727 * PARAMETERS : None
13728 *
13729 * RETURN : NO_ERROR on success
13730 * Error codes on failure
13731 *
13732 *==========================================================================*/
13733int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13734{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013735 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013736 int rc = NO_ERROR;
13737
13738 LOGD("E");
13739
13740 rc = stopAllChannels();
13741 if (rc < 0) {
13742 LOGE("stopAllChannels failed");
13743 return rc;
13744 }
13745
13746 rc = notifyErrorForPendingRequests();
13747 if (rc < 0) {
13748 LOGE("notifyErrorForPendingRequests failed");
13749 return rc;
13750 }
13751
13752 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13753 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
13754                 " Format:%d",
13755 mStreamConfigInfo.type[i],
13756 mStreamConfigInfo.stream_sizes[i].width,
13757 mStreamConfigInfo.stream_sizes[i].height,
13758 mStreamConfigInfo.postprocess_mask[i],
13759 mStreamConfigInfo.format[i]);
13760 }
13761
13762 /* Send meta stream info once again so that ISP can start */
13763 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13764 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13765 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13766 mParameters);
13767 if (rc < 0) {
13768 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13769 }
13770
13771 rc = startAllChannels();
13772 if (rc < 0) {
13773 LOGE("startAllChannels failed");
13774 return rc;
13775 }
13776
13777 LOGD("X");
13778 return rc;
13779}
13780
13781/*===========================================================================
13782 * FUNCTION : stopAllChannels
13783 *
13784 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13785 *
13786 * PARAMETERS : None
13787 *
13788 * RETURN : NO_ERROR on success
13789 * Error codes on failure
13790 *
13791 *==========================================================================*/
13792int32_t QCamera3HardwareInterface::stopAllChannels()
13793{
13794 int32_t rc = NO_ERROR;
13795
13796 LOGD("Stopping all channels");
13797 // Stop the Streams/Channels
13798 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13799 it != mStreamInfo.end(); it++) {
13800 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13801 if (channel) {
13802 channel->stop();
13803 }
13804 (*it)->status = INVALID;
13805 }
13806
13807 if (mSupportChannel) {
13808 mSupportChannel->stop();
13809 }
13810 if (mAnalysisChannel) {
13811 mAnalysisChannel->stop();
13812 }
13813 if (mRawDumpChannel) {
13814 mRawDumpChannel->stop();
13815 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013816 if (mHdrPlusRawSrcChannel) {
13817 mHdrPlusRawSrcChannel->stop();
13818 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013819 if (mMetadataChannel) {
13820 /* If content of mStreamInfo is not 0, there is metadata stream */
13821 mMetadataChannel->stop();
13822 }
13823
13824 LOGD("All channels stopped");
13825 return rc;
13826}
13827
13828/*===========================================================================
13829 * FUNCTION : startAllChannels
13830 *
13831 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13832 *
13833 * PARAMETERS : None
13834 *
13835 * RETURN : NO_ERROR on success
13836 * Error codes on failure
13837 *
13838 *==========================================================================*/
13839int32_t QCamera3HardwareInterface::startAllChannels()
13840{
13841 int32_t rc = NO_ERROR;
13842
13843 LOGD("Start all channels ");
13844 // Start the Streams/Channels
13845 if (mMetadataChannel) {
13846 /* If content of mStreamInfo is not 0, there is metadata stream */
13847 rc = mMetadataChannel->start();
13848 if (rc < 0) {
13849 LOGE("META channel start failed");
13850 return rc;
13851 }
13852 }
13853 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13854 it != mStreamInfo.end(); it++) {
13855 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13856 if (channel) {
13857 rc = channel->start();
13858 if (rc < 0) {
13859 LOGE("channel start failed");
13860 return rc;
13861 }
13862 }
13863 }
13864 if (mAnalysisChannel) {
13865 mAnalysisChannel->start();
13866 }
13867 if (mSupportChannel) {
13868 rc = mSupportChannel->start();
13869 if (rc < 0) {
13870 LOGE("Support channel start failed");
13871 return rc;
13872 }
13873 }
13874 if (mRawDumpChannel) {
13875 rc = mRawDumpChannel->start();
13876 if (rc < 0) {
13877 LOGE("RAW dump channel start failed");
13878 return rc;
13879 }
13880 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013881 if (mHdrPlusRawSrcChannel) {
13882 rc = mHdrPlusRawSrcChannel->start();
13883 if (rc < 0) {
13884 LOGE("HDR+ RAW channel start failed");
13885 return rc;
13886 }
13887 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013888
13889 LOGD("All channels started");
13890 return rc;
13891}
13892
13893/*===========================================================================
13894 * FUNCTION : notifyErrorForPendingRequests
13895 *
13896 * DESCRIPTION: This function sends error for all the pending requests/buffers
13897 *
13898 * PARAMETERS : None
13899 *
13900 * RETURN : Error codes
13901 * NO_ERROR on success
13902 *
13903 *==========================================================================*/
13904int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13905{
13906 int32_t rc = NO_ERROR;
13907 unsigned int frameNum = 0;
13908 camera3_capture_result_t result;
13909 camera3_stream_buffer_t *pStream_Buf = NULL;
13910
13911 memset(&result, 0, sizeof(camera3_capture_result_t));
13912
13913 if (mPendingRequestsList.size() > 0) {
13914 pendingRequestIterator i = mPendingRequestsList.begin();
13915 frameNum = i->frame_number;
13916 } else {
13917 /* There might still be pending buffers even though there are
13918 no pending requests. Setting the frameNum to MAX so that
13919 all the buffers with smaller frame numbers are returned */
13920 frameNum = UINT_MAX;
13921 }
13922
13923 LOGH("Oldest frame num on mPendingRequestsList = %u",
13924 frameNum);
13925
Emilian Peev7650c122017-01-19 08:24:33 -080013926 notifyErrorFoPendingDepthData(mDepthChannel);
13927
Thierry Strudel3d639192016-09-09 11:52:26 -070013928 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13929 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13930
13931 if (req->frame_number < frameNum) {
13932 // Send Error notify to frameworks for each buffer for which
13933 // metadata buffer is already sent
13934 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13935 req->frame_number, req->mPendingBufferList.size());
13936
13937 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13938 if (NULL == pStream_Buf) {
13939 LOGE("No memory for pending buffers array");
13940 return NO_MEMORY;
13941 }
13942 memset(pStream_Buf, 0,
13943 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13944 result.result = NULL;
13945 result.frame_number = req->frame_number;
13946 result.num_output_buffers = req->mPendingBufferList.size();
13947 result.output_buffers = pStream_Buf;
13948
13949 size_t index = 0;
13950 for (auto info = req->mPendingBufferList.begin();
13951 info != req->mPendingBufferList.end(); ) {
13952
13953 camera3_notify_msg_t notify_msg;
13954 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13955 notify_msg.type = CAMERA3_MSG_ERROR;
13956 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13957 notify_msg.message.error.error_stream = info->stream;
13958 notify_msg.message.error.frame_number = req->frame_number;
13959 pStream_Buf[index].acquire_fence = -1;
13960 pStream_Buf[index].release_fence = -1;
13961 pStream_Buf[index].buffer = info->buffer;
13962 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13963 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013964 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013965 index++;
13966 // Remove buffer from list
13967 info = req->mPendingBufferList.erase(info);
13968 }
13969
13970 // Remove this request from Map
13971 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13972 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13973 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13974
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013975 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013976
13977 delete [] pStream_Buf;
13978 } else {
13979
13980 // Go through the pending requests info and send error request to framework
13981 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13982
13983 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13984
13985 // Send error notify to frameworks
13986 camera3_notify_msg_t notify_msg;
13987 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13988 notify_msg.type = CAMERA3_MSG_ERROR;
13989 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13990 notify_msg.message.error.error_stream = NULL;
13991 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013992 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013993
13994 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13995 if (NULL == pStream_Buf) {
13996 LOGE("No memory for pending buffers array");
13997 return NO_MEMORY;
13998 }
13999 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
14000
14001 result.result = NULL;
14002 result.frame_number = req->frame_number;
14003 result.input_buffer = i->input_buffer;
14004 result.num_output_buffers = req->mPendingBufferList.size();
14005 result.output_buffers = pStream_Buf;
14006
14007 size_t index = 0;
14008 for (auto info = req->mPendingBufferList.begin();
14009 info != req->mPendingBufferList.end(); ) {
14010 pStream_Buf[index].acquire_fence = -1;
14011 pStream_Buf[index].release_fence = -1;
14012 pStream_Buf[index].buffer = info->buffer;
14013 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
14014 pStream_Buf[index].stream = info->stream;
14015 index++;
14016 // Remove buffer from list
14017 info = req->mPendingBufferList.erase(info);
14018 }
14019
14020 // Remove this request from Map
14021 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
14022 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
14023 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
14024
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014025 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014026 delete [] pStream_Buf;
14027 i = erasePendingRequest(i);
14028 }
14029 }
14030
14031 /* Reset pending frame Drop list and requests list */
14032 mPendingFrameDropList.clear();
14033
14034 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
14035 req.mPendingBufferList.clear();
14036 }
14037 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070014038 LOGH("Cleared all the pending buffers ");
14039
14040 return rc;
14041}
14042
14043bool QCamera3HardwareInterface::isOnEncoder(
14044 const cam_dimension_t max_viewfinder_size,
14045 uint32_t width, uint32_t height)
14046{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014047 return ((width > (uint32_t)max_viewfinder_size.width) ||
14048 (height > (uint32_t)max_viewfinder_size.height) ||
14049 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14050 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014051}
14052
14053/*===========================================================================
14054 * FUNCTION : setBundleInfo
14055 *
14056 * DESCRIPTION: Set bundle info for all streams that are bundled.
14057 *
14058 * PARAMETERS : None
14059 *
14060 * RETURN : NO_ERROR on success
14061 * Error codes on failure
14062 *==========================================================================*/
14063int32_t QCamera3HardwareInterface::setBundleInfo()
14064{
14065 int32_t rc = NO_ERROR;
14066
14067 if (mChannelHandle) {
14068 cam_bundle_config_t bundleInfo;
14069 memset(&bundleInfo, 0, sizeof(bundleInfo));
14070 rc = mCameraHandle->ops->get_bundle_info(
14071 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14072 if (rc != NO_ERROR) {
14073 LOGE("get_bundle_info failed");
14074 return rc;
14075 }
14076 if (mAnalysisChannel) {
14077 mAnalysisChannel->setBundleInfo(bundleInfo);
14078 }
14079 if (mSupportChannel) {
14080 mSupportChannel->setBundleInfo(bundleInfo);
14081 }
14082 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14083 it != mStreamInfo.end(); it++) {
14084 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14085 channel->setBundleInfo(bundleInfo);
14086 }
14087 if (mRawDumpChannel) {
14088 mRawDumpChannel->setBundleInfo(bundleInfo);
14089 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014090 if (mHdrPlusRawSrcChannel) {
14091 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14092 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014093 }
14094
14095 return rc;
14096}
14097
14098/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014099 * FUNCTION : setInstantAEC
14100 *
14101 * DESCRIPTION: Set Instant AEC related params.
14102 *
14103 * PARAMETERS :
14104 * @meta: CameraMetadata reference
14105 *
14106 * RETURN : NO_ERROR on success
14107 * Error codes on failure
14108 *==========================================================================*/
14109int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14110{
14111 int32_t rc = NO_ERROR;
14112 uint8_t val = 0;
14113 char prop[PROPERTY_VALUE_MAX];
14114
14115 // First try to configure instant AEC from framework metadata
14116 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14117 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14118 }
14119
14120 // If framework did not set this value, try to read from set prop.
14121 if (val == 0) {
14122 memset(prop, 0, sizeof(prop));
14123 property_get("persist.camera.instant.aec", prop, "0");
14124 val = (uint8_t)atoi(prop);
14125 }
14126
14127 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14128 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14129 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14130 mInstantAEC = val;
14131 mInstantAECSettledFrameNumber = 0;
14132 mInstantAecFrameIdxCount = 0;
14133 LOGH("instantAEC value set %d",val);
14134 if (mInstantAEC) {
14135 memset(prop, 0, sizeof(prop));
14136 property_get("persist.camera.ae.instant.bound", prop, "10");
14137 int32_t aec_frame_skip_cnt = atoi(prop);
14138 if (aec_frame_skip_cnt >= 0) {
14139 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14140 } else {
14141 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14142 rc = BAD_VALUE;
14143 }
14144 }
14145 } else {
14146 LOGE("Bad instant aec value set %d", val);
14147 rc = BAD_VALUE;
14148 }
14149 return rc;
14150}
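
/*
 * Illustrative usage (assumption: standard property workflow): instant AEC is taken from the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag when present, otherwise from properties, e.g.
 *
 *   adb shell setprop persist.camera.instant.aec 1        // select an instant-AEC convergence mode
 *   adb shell setprop persist.camera.ae.instant.bound 8   // frames to skip while AEC settles
 *
 * Accepted mode values lie in [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX).
 */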
14151
14152/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014153 * FUNCTION : get_num_overall_buffers
14154 *
14155 * DESCRIPTION: Estimate number of pending buffers across all requests.
14156 *
14157 * PARAMETERS : None
14158 *
14159 * RETURN : Number of overall pending buffers
14160 *
14161 *==========================================================================*/
14162uint32_t PendingBuffersMap::get_num_overall_buffers()
14163{
14164 uint32_t sum_buffers = 0;
14165 for (auto &req : mPendingBuffersInRequest) {
14166 sum_buffers += req.mPendingBufferList.size();
14167 }
14168 return sum_buffers;
14169}
14170
14171/*===========================================================================
14172 * FUNCTION : removeBuf
14173 *
14174 * DESCRIPTION: Remove a matching buffer from tracker.
14175 *
14176 * PARAMETERS : @buffer: image buffer for the callback
14177 *
14178 * RETURN : None
14179 *
14180 *==========================================================================*/
14181void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14182{
14183 bool buffer_found = false;
14184 for (auto req = mPendingBuffersInRequest.begin();
14185 req != mPendingBuffersInRequest.end(); req++) {
14186 for (auto k = req->mPendingBufferList.begin();
14187 k != req->mPendingBufferList.end(); k++ ) {
14188 if (k->buffer == buffer) {
14189 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14190 req->frame_number, buffer);
14191 k = req->mPendingBufferList.erase(k);
14192 if (req->mPendingBufferList.empty()) {
14193 // Remove this request from Map
14194 req = mPendingBuffersInRequest.erase(req);
14195 }
14196 buffer_found = true;
14197 break;
14198 }
14199 }
14200 if (buffer_found) {
14201 break;
14202 }
14203 }
14204 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14205 get_num_overall_buffers());
14206}
14207
14208/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014209 * FUNCTION : getBufErrStatus
14210 *
14211 * DESCRIPTION: get buffer error status
14212 *
14213 * PARAMETERS : @buffer: buffer handle
14214 *
14215 * RETURN : Error status
14216 *
14217 *==========================================================================*/
14218int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14219{
14220 for (auto& req : mPendingBuffersInRequest) {
14221 for (auto& k : req.mPendingBufferList) {
14222 if (k.buffer == buffer)
14223 return k.bufStatus;
14224 }
14225 }
14226 return CAMERA3_BUFFER_STATUS_OK;
14227}
14228
14229/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014230 * FUNCTION : setPAAFSupport
14231 *
14232 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14233 * feature mask according to stream type and filter
14234 * arrangement
14235 *
14236 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14237 * @stream_type: stream type
14238 * @filter_arrangement: filter arrangement
14239 *
14240 * RETURN : None
14241 *==========================================================================*/
14242void QCamera3HardwareInterface::setPAAFSupport(
14243 cam_feature_mask_t& feature_mask,
14244 cam_stream_type_t stream_type,
14245 cam_color_filter_arrangement_t filter_arrangement)
14246{
Thierry Strudel3d639192016-09-09 11:52:26 -070014247 switch (filter_arrangement) {
14248 case CAM_FILTER_ARRANGEMENT_RGGB:
14249 case CAM_FILTER_ARRANGEMENT_GRBG:
14250 case CAM_FILTER_ARRANGEMENT_GBRG:
14251 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014252 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14253 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014254 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014255 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14256 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014257 }
14258 break;
14259 case CAM_FILTER_ARRANGEMENT_Y:
14260 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14261 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14262 }
14263 break;
14264 default:
14265 break;
14266 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014267 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14268 feature_mask, stream_type, filter_arrangement);
14269
14270
Thierry Strudel3d639192016-09-09 11:52:26 -070014271}
14272
14273/*===========================================================================
14274* FUNCTION : getSensorMountAngle
14275*
14276* DESCRIPTION: Retrieve sensor mount angle
14277*
14278* PARAMETERS : None
14279*
14280* RETURN : sensor mount angle in uint32_t
14281*==========================================================================*/
14282uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14283{
14284 return gCamCapability[mCameraId]->sensor_mount_angle;
14285}
14286
14287/*===========================================================================
14288* FUNCTION : getRelatedCalibrationData
14289*
14290* DESCRIPTION: Retrieve related system calibration data
14291*
14292* PARAMETERS : None
14293*
14294* RETURN : Pointer of related system calibration data
14295*==========================================================================*/
14296const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14297{
14298 return (const cam_related_system_calibration_data_t *)
14299 &(gCamCapability[mCameraId]->related_cam_calibration);
14300}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014301
14302/*===========================================================================
14303 * FUNCTION : is60HzZone
14304 *
14305 * DESCRIPTION: Whether the device is in a region with 60Hz mains (electricity) frequency
14306 *
14307 * PARAMETERS : None
14308 *
14309 * RETURN : True if in 60Hz zone, False otherwise
14310 *==========================================================================*/
14311bool QCamera3HardwareInterface::is60HzZone()
14312{
14313 time_t t = time(NULL);
14314 struct tm lt;
14315
14316 struct tm* r = localtime_r(&t, &lt);
14317
14318 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14319 return true;
14320 else
14321 return false;
14322}
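
/*
 * The check above is a coarse UTC-offset heuristic rather than a lookup of the actual mains
 * frequency: offsets at or below UTC-2 or at or above UTC+8 are treated as 60Hz regions and
 * everything in between as 50Hz; e.g. UTC-8 returns true while UTC+1 returns false. If
 * localtime_r() fails, the code defaults to reporting a 60Hz zone.
 */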
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014323
14324/*===========================================================================
14325 * FUNCTION : adjustBlackLevelForCFA
14326 *
14327 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14328 * of bayer CFA (Color Filter Array).
14329 *
14330 * PARAMETERS : @input: black level pattern in the order of RGGB
14331 * @output: black level pattern in the order of CFA
14332 * @color_arrangement: CFA color arrangement
14333 *
14334 * RETURN : None
14335 *==========================================================================*/
14336template<typename T>
14337void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14338 T input[BLACK_LEVEL_PATTERN_CNT],
14339 T output[BLACK_LEVEL_PATTERN_CNT],
14340 cam_color_filter_arrangement_t color_arrangement)
14341{
14342 switch (color_arrangement) {
14343 case CAM_FILTER_ARRANGEMENT_GRBG:
14344 output[0] = input[1];
14345 output[1] = input[0];
14346 output[2] = input[3];
14347 output[3] = input[2];
14348 break;
14349 case CAM_FILTER_ARRANGEMENT_GBRG:
14350 output[0] = input[2];
14351 output[1] = input[3];
14352 output[2] = input[0];
14353 output[3] = input[1];
14354 break;
14355 case CAM_FILTER_ARRANGEMENT_BGGR:
14356 output[0] = input[3];
14357 output[1] = input[2];
14358 output[2] = input[1];
14359 output[3] = input[0];
14360 break;
14361 case CAM_FILTER_ARRANGEMENT_RGGB:
14362 output[0] = input[0];
14363 output[1] = input[1];
14364 output[2] = input[2];
14365 output[3] = input[3];
14366 break;
14367 default:
14368 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14369 break;
14370 }
14371}
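
/*
 * Worked example, derived from the switch above: with an RGGB-ordered input
 * {R, Gr, Gb, B} = {64, 65, 66, 67} and a GRBG color arrangement, the output becomes
 * {65, 64, 67, 66}, i.e. each black-level entry is moved to the position its color
 * occupies in the sensor's CFA layout.
 */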
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014372
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014373void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14374 CameraMetadata &resultMetadata,
14375 std::shared_ptr<metadata_buffer_t> settings)
14376{
14377 if (settings == nullptr) {
14378 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14379 return;
14380 }
14381
14382 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14383 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14384 }
14385
14386 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14387 String8 str((const char *)gps_methods);
14388 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14389 }
14390
14391 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14392 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14393 }
14394
14395 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14396 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14397 }
14398
14399 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14400 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14401 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14402 }
14403
14404 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14405 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14406 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14407 }
14408
14409 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14410 int32_t fwk_thumb_size[2];
14411 fwk_thumb_size[0] = thumb_size->width;
14412 fwk_thumb_size[1] = thumb_size->height;
14413 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14414 }
14415
14416 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14417 uint8_t fwk_intent = intent[0];
14418 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14419 }
14420}
14421
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014422bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14423 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14424 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014425{
14426 if (hdrPlusRequest == nullptr) return false;
14427
14428 // Check noise reduction mode is high quality.
14429 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14430 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14431 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014432 ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
14433 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014434 return false;
14435 }
14436
14437 // Check edge mode is high quality.
14438 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14439 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14440 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14441 return false;
14442 }
14443
14444 if (request.num_output_buffers != 1 ||
14445 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14446 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014447 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14448 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14449                     request.output_buffers[i].stream->width,
14450                     request.output_buffers[i].stream->height,
14451                     request.output_buffers[i].stream->format);
14452 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014453 return false;
14454 }
14455
14456 // Get a YUV buffer from pic channel.
14457 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14458 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14459 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14460 if (res != OK) {
14461 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14462 __FUNCTION__, strerror(-res), res);
14463 return false;
14464 }
14465
14466 pbcamera::StreamBuffer buffer;
14467 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014468 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014469 buffer.data = yuvBuffer->buffer;
14470 buffer.dataSize = yuvBuffer->frame_len;
14471
14472 pbcamera::CaptureRequest pbRequest;
14473 pbRequest.id = request.frame_number;
14474 pbRequest.outputBuffers.push_back(buffer);
14475
14476 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014477 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014478 if (res != OK) {
14479 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14480 strerror(-res), res);
14481 return false;
14482 }
14483
14484 hdrPlusRequest->yuvBuffer = yuvBuffer;
14485 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14486
14487 return true;
14488}
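
/*
 * Summary of the gating above: a request is only treated as an HDR+ capture when both
 * ANDROID_NOISE_REDUCTION_MODE and ANDROID_EDGE_MODE are HIGH_QUALITY and the request has
 * exactly one output buffer on a BLOB (JPEG) stream. The YUV intermediate comes from the
 * pic channel and is filled by the HDR+ service; the resulting buffer is handed back
 * through onCaptureResult().
 */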
14489
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014490status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14491 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14492 return OK;
14493 }
14494
14495 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14496 if (res != OK) {
14497 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14498 strerror(-res), res);
14499 return res;
14500 }
14501 gHdrPlusClientOpening = true;
14502
14503 return OK;
14504}
14505
Chien-Yu Chenee335912017-02-09 17:53:20 -080014506status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14507{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014508 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014509
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014510 // Check if gHdrPlusClient is opened or being opened.
14511 if (gHdrPlusClient == nullptr) {
14512 if (gHdrPlusClientOpening) {
14513 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14514 return OK;
14515 }
14516
14517 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014518 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014519 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14520 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014521 return res;
14522 }
14523
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014524 // When opening HDR+ client completes, HDR+ mode will be enabled.
14525 return OK;
14526
Chien-Yu Chenee335912017-02-09 17:53:20 -080014527 }
14528
14529 // Configure stream for HDR+.
14530 res = configureHdrPlusStreamsLocked();
14531 if (res != OK) {
14532 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014533 return res;
14534 }
14535
14536 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14537 res = gHdrPlusClient->setZslHdrPlusMode(true);
14538 if (res != OK) {
14539 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014540 return res;
14541 }
14542
14543 mHdrPlusModeEnabled = true;
14544 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14545
14546 return OK;
14547}
14548
14549void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14550{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014551 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014552 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014553 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14554 if (res != OK) {
14555 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14556 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014557
14558 // Close HDR+ client so Easel can enter low power mode.
14559 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14560 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014561 }
14562
14563 mHdrPlusModeEnabled = false;
14564 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14565}
14566
14567status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014568{
14569 pbcamera::InputConfiguration inputConfig;
14570 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14571 status_t res = OK;
14572
14573 // Configure HDR+ client streams.
14574 // Get input config.
14575 if (mHdrPlusRawSrcChannel) {
14576 // HDR+ input buffers will be provided by HAL.
14577 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14578 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14579 if (res != OK) {
14580             LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14581 __FUNCTION__, strerror(-res), res);
14582 return res;
14583 }
14584
14585 inputConfig.isSensorInput = false;
14586 } else {
14587 // Sensor MIPI will send data to Easel.
14588 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014589 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014590 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14591 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14592 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14593 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14594 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14595 if (mSensorModeInfo.num_raw_bits != 10) {
14596 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14597 mSensorModeInfo.num_raw_bits);
14598 return BAD_VALUE;
14599 }
14600
14601 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014602 }
14603
14604 // Get output configurations.
14605 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014606 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014607
14608 // Easel may need to output YUV output buffers if mPictureChannel was created.
14609 pbcamera::StreamConfiguration yuvOutputConfig;
14610 if (mPictureChannel != nullptr) {
14611 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14612 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14613 if (res != OK) {
14614             LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14615 __FUNCTION__, strerror(-res), res);
14616
14617 return res;
14618 }
14619
14620 outputStreamConfigs.push_back(yuvOutputConfig);
14621 }
14622
14623 // TODO: consider other channels for YUV output buffers.
14624
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014625 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014626 if (res != OK) {
14627         LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14628 strerror(-res), res);
14629 return res;
14630 }
14631
14632 return OK;
14633}
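
/*
 * Note: the input side configured above has two modes -- either the HAL supplies RAW10
 * buffers from mHdrPlusRawSrcChannel (isSensorInput = false), or Easel taps the sensor
 * MIPI output directly using the current sensor mode geometry (isSensorInput = true).
 * Only 10-bit raw sensor modes are accepted in the direct-sensor case.
 */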
14634
void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
    if (client == nullptr) {
        ALOGE("%s: Opened client is null.", __FUNCTION__);
        return;
    }

    ALOGI("%s: HDR+ client opened.", __FUNCTION__);

    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClient = std::move(client);
    gHdrPlusClientOpening = false;
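    // At this point the client handle has been published under
    // gHdrPlusClientLock and the "opening" flag cleared, so other code paths
    // can see that the asynchronous open has finished.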

    // Set static metadata.
    status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
    if (res != OK) {
        LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
                __FUNCTION__, strerror(-res), res);
        gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
        gHdrPlusClient = nullptr;
        return;
    }

    // Enable HDR+ mode.
    res = enableHdrPlusModeLocked();
    if (res != OK) {
        LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
    }
}

void QCamera3HardwareInterface::onOpenFailed(status_t err) {
    ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
    Mutex::Autolock l(gHdrPlusClientLock);
    gHdrPlusClientOpening = false;
}

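// onCaptureResult() and onFailedCaptureResult() are the HDR+ client's capture
// callbacks: the client presumably calls them once an HDR+ request finishes
// (or fails), handing back the processed output buffer and result metadata.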
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata) {
    if (result != nullptr) {
        if (result->outputBuffers.size() != 1) {
            ALOGE("%s: Number of output buffers (%zu) is not supported.", __FUNCTION__,
                    result->outputBuffers.size());
            return;
        }

        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
                    result->outputBuffers[0].streamId);
            return;
        }

        // Find the pending HDR+ request.
        HdrPlusPendingRequest pendingRequest;
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                // Defensive check: avoid dereferencing an end iterator if the
                // request cannot be found.
                ALOGE("%s: Couldn't find pending request for request id %d.", __FUNCTION__,
                        result->requestId);
                return;
            }
            pendingRequest = req->second;
        }

        // Update the result metadata with the settings of the HDR+ still capture request
        // because the result metadata belongs to a ZSL buffer.
        CameraMetadata metadata;
        metadata = &resultMetadata;
        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
        camera_metadata_t* updatedResultMetadata = metadata.release();
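        // updatedResultMetadata is now a standalone buffer owned by the HAL;
        // it is handed to handlePendingResultsWithLock() below, which is
        // responsible for freeing it (see the note there).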

        QCamera3PicChannel *picChannel =
                (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;

        // Check if dumping HDR+ YUV output is enabled.
        char prop[PROPERTY_VALUE_MAX];
        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
        bool dumpYuvOutput = atoi(prop);
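        // For local debugging this can typically be toggled at runtime, e.g.
        // "adb shell setprop persist.camera.hdrplus.dump_yuv 1" (assumes a
        // debuggable build where the property is writable).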

        if (dumpYuvOutput) {
            // Dump yuv buffer to a ppm file.
            pbcamera::StreamConfiguration outputConfig;
            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
            if (rc == OK) {
                char buf[FILENAME_MAX] = {};
                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                        result->requestId, result->outputBuffers[0].streamId,
                        outputConfig.image.width, outputConfig.image.height);

                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
            } else {
                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
                        __FUNCTION__, strerror(-rc), rc);
            }
        }

        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
        auto halMetadata = std::make_shared<metadata_buffer_t>();
        clear_metadata_buffer(halMetadata.get());

        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
        // encoding.
        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
                halStreamId, /*minFrameDuration*/0);
        if (res == OK) {
            // Return the buffer to pic channel for encoding.
            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer without encoding.
            // TODO: This should not happen but we may want to report an error buffer to camera
            // service.
            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
                    strerror(-res), res);
        }

        // Send HDR+ metadata to framework.
        {
            pthread_mutex_lock(&mMutex);

            // updatedResultMetadata will be freed in handlePendingResultsWithLock.
            handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
            pthread_mutex_unlock(&mMutex);
        }

        // Remove the HDR+ pending request.
        {
            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
            auto req = mHdrPlusPendingRequests.find(result->requestId);
            mHdrPlusPendingRequests.erase(req);
        }
    }
}

void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
    // TODO: Handle HDR+ capture failures and send the failure to framework.
    Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        // Defensive check: nothing to clean up if the request is unknown.
        ALOGE("%s: Couldn't find pending request for request id %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }

    // Return the buffer to pic channel.
    QCamera3PicChannel *picChannel =
            (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
    picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());

    mHdrPlusPendingRequests.erase(pendingRequest);
}

}; //end namespace qcamera