/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"
#include "EaselManagerClient.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
#define HDR_PLUS_PERF_TIME_OUT  (7000) // milliseconds
// Threshold for detection of missing request buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3   ( CAM_QCOM_FEATURE_DENOISE2D |\
                                              CAM_QCOM_FEATURE_CROP |\
                                              CAM_QCOM_FEATURE_ROTATION |\
                                              CAM_QCOM_FEATURE_SHARPNESS |\
                                              CAM_QCOM_FEATURE_SCALE |\
                                              CAM_QCOM_FEATURE_CAC |\
                                              CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 5.0

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
EaselManagerClient gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

Mutex gHdrPlusClientLock; // Protects the Easel related variables above.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",  CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,  CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON,  CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: when mapping from HAL to Android the
 * code traverses from lower to higher index, so for HAL values that map to multiple
 * Android values the first entry found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
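
// Illustrative sketch (documentation only, excluded from the build): how the
// QCameraMap tables above are intended to be consumed. The production HAL goes
// through the lookupFwkName()/lookupHalName() template helpers declared in
// QCamera3HWI.h; the fwk_name/hal_name member names used here follow that
// declaration and are an assumption of this sketch. As noted in the comment
// above REFERENCE_ILLUMINANT_MAP, the scan runs from lower to higher index and
// the first match wins, which is why entry order matters when one HAL value
// maps to several Android values.
#if 0 // example only, never compiled
static int exampleLookupFwkIlluminant(cam_illuminat_t halValue)
{
    const size_t len =
            METADATA_MAP_SIZE(QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP);
    for (size_t i = 0; i < len; i++) {
        if (QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[i].hal_name == halValue) {
            // First match wins; later duplicates for the same HAL value are ignored.
            return QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[i].fwk_name;
        }
    }
    return NAME_NOT_FOUND;
}
#endif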

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE,       CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO,   CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100,    CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200,    CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400,    CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800,    CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600,   CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200,   CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
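
// Illustrative sketch (documentation only, excluded from the build): the camera
// framework drives this HAL through the static function table above. The
// constructor below wires mCameraDevice.ops to &mCameraOps and mCameraDevice.priv
// to the QCamera3HardwareInterface instance, so a framework call like the one
// sketched here lands in the matching static wrapper, which recovers the instance
// from dev->priv. The local variable names here are hypothetical.
#if 0 // example only, never compiled
void exampleFrameworkCalls(camera3_device_t *dev,
        const camera3_callback_ops_t *callbacks,
        camera3_stream_configuration_t *streams)
{
    dev->ops->initialize(dev, callbacks);          // -> QCamera3HardwareInterface::initialize
    dev->ops->configure_streams(dev, streams);     // -> QCamera3HardwareInterface::configure_streams
}
#endif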

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
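
// Illustrative usage sketch (documentation only, excluded from the build):
// logEaselEvent() is called further down in this file, e.g. from openCamera().
// Events are only logged when gEaselProfilingEnabled has been enabled elsewhere;
// otherwise the call is effectively a no-op thanks to the CC_UNLIKELY guard.
#if 0 // example only, never compiled
static void exampleLogEaselResume()
{
    logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
}
#endif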
430
Thierry Strudel3d639192016-09-09 11:52:26 -0700431/*===========================================================================
432 * FUNCTION : QCamera3HardwareInterface
433 *
434 * DESCRIPTION: constructor of QCamera3HardwareInterface
435 *
436 * PARAMETERS :
437 * @cameraId : camera ID
438 *
439 * RETURN : none
440 *==========================================================================*/
441QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
442 const camera_module_callbacks_t *callbacks)
443 : mCameraId(cameraId),
444 mCameraHandle(NULL),
445 mCameraInitialized(false),
446 mCallbackOps(NULL),
447 mMetadataChannel(NULL),
448 mPictureChannel(NULL),
449 mRawChannel(NULL),
450 mSupportChannel(NULL),
451 mAnalysisChannel(NULL),
452 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700453 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700454 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800455 mDepthChannel(NULL),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800456 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700457 mChannelHandle(0),
458 mFirstConfiguration(true),
459 mFlush(false),
460 mFlushPerf(false),
461 mParamHeap(NULL),
462 mParameters(NULL),
463 mPrevParameters(NULL),
464 m_bIsVideo(false),
465 m_bIs4KVideo(false),
466 m_bEisSupportedSize(false),
467 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800468 m_bEis3PropertyEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700469 m_MobicatMask(0),
470 mMinProcessedFrameDuration(0),
471 mMinJpegFrameDuration(0),
472 mMinRawFrameDuration(0),
473 mMetaFrameCount(0U),
474 mUpdateDebugLevel(false),
475 mCallbacks(callbacks),
476 mCaptureIntent(0),
477 mCacMode(0),
Shuzhen Wang2abea3d2016-03-31 11:09:27 -0700478 mHybridAeEnable(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800479 /* DevCamDebug metadata internal m control*/
480 mDevCamDebugMetaEnable(0),
481 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700482 mBatchSize(0),
483 mToBeQueuedVidBufs(0),
484 mHFRVideoFps(DEFAULT_VIDEO_FPS),
485 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800486 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800487 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700488 mFirstFrameNumberInBatch(0),
489 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800490 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700491 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
492 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000493 mPDSupported(false),
494 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700495 mInstantAEC(false),
496 mResetInstantAEC(false),
497 mInstantAECSettledFrameNumber(0),
498 mAecSkipDisplayFrameBound(0),
499 mInstantAecFrameIdxCount(0),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800500 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700501 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700502 mLastCustIntentFrmNum(-1),
503 mState(CLOSED),
504 mIsDeviceLinked(false),
505 mIsMainCamera(true),
506 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700507 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800508 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800509 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700510 mZslEnabled(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800511 mIsApInputUsedForHdrPlus(false),
512 mFirstPreviewIntentSeen(false),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800513 m_bSensorHDREnabled(false)
Thierry Strudel3d639192016-09-09 11:52:26 -0700514{
515 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700516 mCommon.init(gCamCapability[cameraId]);
517 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700518#ifndef USE_HAL_3_3
519 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
520#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700521 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700522#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700523 mCameraDevice.common.close = close_camera_device;
524 mCameraDevice.ops = &mCameraOps;
525 mCameraDevice.priv = this;
526 gCamCapability[cameraId]->version = CAM_HAL_V3;
527 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
528 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
529 gCamCapability[cameraId]->min_num_pp_bufs = 3;
530
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800531 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700532
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800533 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mPendingLiveRequest = 0;
535 mCurrentRequestId = -1;
536 pthread_mutex_init(&mMutex, NULL);
537
538 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
539 mDefaultMetadata[i] = NULL;
540
541 // Getting system props of different kinds
542 char prop[PROPERTY_VALUE_MAX];
543 memset(prop, 0, sizeof(prop));
544 property_get("persist.camera.raw.dump", prop, "0");
545 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800546 property_get("persist.camera.hal3.force.hdr", prop, "0");
547 mForceHdrSnapshot = atoi(prop);
548
Thierry Strudel3d639192016-09-09 11:52:26 -0700549 if (mEnableRawDump)
550 LOGD("Raw dump from Camera HAL enabled");
551
552 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
553 memset(mLdafCalib, 0, sizeof(mLdafCalib));
554
555 memset(prop, 0, sizeof(prop));
556 property_get("persist.camera.tnr.preview", prop, "0");
557 m_bTnrPreview = (uint8_t)atoi(prop);
558
559 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800560 property_get("persist.camera.swtnr.preview", prop, "1");
561 m_bSwTnrPreview = (uint8_t)atoi(prop);
562
563 memset(prop, 0, sizeof(prop));
Thierry Strudel3d639192016-09-09 11:52:26 -0700564 property_get("persist.camera.tnr.video", prop, "0");
565 m_bTnrVideo = (uint8_t)atoi(prop);
566
567 memset(prop, 0, sizeof(prop));
568 property_get("persist.camera.avtimer.debug", prop, "0");
569 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800570 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700571
Thierry Strudel54dc9782017-02-15 12:12:10 -0800572 memset(prop, 0, sizeof(prop));
573 property_get("persist.camera.cacmode.disable", prop, "0");
574 m_cacModeDisabled = (uint8_t)atoi(prop);
575
Thierry Strudel3d639192016-09-09 11:52:26 -0700576 //Load and read GPU library.
577 lib_surface_utils = NULL;
578 LINK_get_surface_pixel_alignment = NULL;
579 mSurfaceStridePadding = CAM_PAD_TO_32;
580 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
581 if (lib_surface_utils) {
582 *(void **)&LINK_get_surface_pixel_alignment =
583 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
584 if (LINK_get_surface_pixel_alignment) {
585 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
586 }
587 dlclose(lib_surface_utils);
588 }
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700589
Emilian Peev0f3c3162017-03-15 12:57:46 +0000590 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
591 mPDSupported = (0 <= mPDIndex) ? true : false;
592
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700593 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700594}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);
    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}
875
876/*===========================================================================
877 * FUNCTION : openCamera
878 *
879 * DESCRIPTION: open camera
880 *
881 * PARAMETERS : none
882 *
883 * RETURN : int32_t type of status
884 * NO_ERROR -- success
885 * none-zero failure code
886 *==========================================================================*/
887int QCamera3HardwareInterface::openCamera()
888{
889 int rc = 0;
890 char value[PROPERTY_VALUE_MAX];
891
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800892 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700893 if (mCameraHandle) {
894 LOGE("Failure: Camera already opened");
895 return ALREADY_EXISTS;
896 }
897
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700898 {
899 Mutex::Autolock l(gHdrPlusClientLock);
900 if (gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -0700901 logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -0700902 rc = gEaselManagerClient.resume();
903 if (rc != 0) {
904 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
905 return rc;
906 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -0800907 }
908 }
909
Thierry Strudel3d639192016-09-09 11:52:26 -0700910 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
911 if (rc < 0) {
912 LOGE("Failed to reserve flash for camera id: %d",
913 mCameraId);
914 return UNKNOWN_ERROR;
915 }
916
917 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
918 if (rc) {
919 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
920 return rc;
921 }
922
923 if (!mCameraHandle) {
924 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
925 return -ENODEV;
926 }
927
928 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
929 camEvtHandle, (void *)this);
930
931 if (rc < 0) {
932 LOGE("Error, failed to register event callback");
933 /* Not closing camera here since it is already handled in destructor */
934 return FAILED_TRANSACTION;
935 }
936
937 mExifParams.debug_params =
938 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
939 if (mExifParams.debug_params) {
940 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
941 } else {
942 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
943 return NO_MEMORY;
944 }
945 mFirstConfiguration = true;
946
947 //Notify display HAL that a camera session is active.
948 //But avoid calling the same during bootup because camera service might open/close
949 //cameras at boot time during its initialization and display service will also internally
950 //wait for camera service to initialize first while calling this display API, resulting in a
951 //deadlock situation. Since boot time camera open/close calls are made only to fetch
952 //capabilities, no need of this display bw optimization.
953 //Use "service.bootanim.exit" property to know boot status.
954 property_get("service.bootanim.exit", value, "0");
955 if (atoi(value) == 1) {
956 pthread_mutex_lock(&gCamLock);
957 if (gNumCameraSessions++ == 0) {
958 setCameraLaunchStatus(true);
959 }
960 pthread_mutex_unlock(&gCamLock);
961 }
962
963 //fill the session id needed while linking dual cam
964 pthread_mutex_lock(&gCamLock);
965 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
966 &sessionId[mCameraId]);
967 pthread_mutex_unlock(&gCamLock);
968
969 if (rc < 0) {
970 LOGE("Error, failed to get sessiion id");
971 return UNKNOWN_ERROR;
972 } else {
973 //Allocate related cam sync buffer
974 //this is needed for the payload that goes along with bundling cmd for related
975 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700976 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
977 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700978 if(rc != OK) {
979 rc = NO_MEMORY;
980 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
981 return NO_MEMORY;
982 }
983
984 //Map memory for related cam sync buffer
985 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700986 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
987 m_pDualCamCmdHeap->getFd(0),
988 sizeof(cam_dual_camera_cmd_info_t),
989 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700990 if(rc < 0) {
991 LOGE("Dualcam: failed to map Related cam sync buffer");
992 rc = FAILED_TRANSACTION;
993 return NO_MEMORY;
994 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700995 m_pDualCamCmdPtr =
996 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700997 }
998
999 LOGH("mCameraId=%d",mCameraId);
1000
1001 return NO_ERROR;
1002}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        Mutex::Autolock l(gHdrPlusClientLock);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient.stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient.suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that the requested stream dimensions are among those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
     * Loop through all streams to find the input stream, if it exists.
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
     * Loop through all streams requested in the configuration and
     * check if unsupported sizes have been requested on any of them.
     */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
         * Sizes are different for each type of stream format; check against
         * the appropriate table.
         */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec, depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
                /* We could potentially break here to enforce that a ZSL stream
                 * set from the framework is always full active array size, but
                 * it is not clear from the spec whether the framework will
                 * always follow that. We also have logic to override to full
                 * array size, so keep the logic lenient for the moment.
                 */
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
                    MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        } /* End of switch(newStream->format) */

        /* We error out even if a single stream has an unsupported size set */
        if (!sizeFound) {
            LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
                    rotatedWidth, rotatedHeight, newStream->format,
                    gCamCapability[mCameraId]->active_array_size.width,
                    gCamCapability[mCameraId]->active_array_size.height);
            rc = -EINVAL;
            break;
        }
    } /* End of for each stream */
    return rc;
}
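
// Worked example (documentation only) for the depth BLOB size check above,
// assuming a hypothetical 640x480 PD stat map:
//     depthSamplesCount = (640 * 480 * 2) / 16 = 38400
// so the matching depth point cloud stream would have to be configured as
// 38400 x 1 to pass validation.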
1297
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001298/*===========================================================================
1299 * FUNCTION : validateUsageFlags
1300 *
1301 * DESCRIPTION: Check if the configuration usage flags map to same internal format.
1302 *
1303 * PARAMETERS :
1304 * @stream_list : streams to be configured
1305 *
1306 * RETURN :
1307 * NO_ERROR if the usage flags are supported
1308 * error code if usage flags are not supported
1309 *
1310 *==========================================================================*/
1311int QCamera3HardwareInterface::validateUsageFlags(
1312 const camera3_stream_configuration_t* streamList)
1313{
1314 for (size_t j = 0; j < streamList->num_streams; j++) {
1315 const camera3_stream_t *newStream = streamList->streams[j];
1316
1317 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1318 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1319 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1320 continue;
1321 }
1322
1323 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1324 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1325 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1326 bool forcePreviewUBWC = true;
1327 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1328 forcePreviewUBWC = false;
1329 }
1330 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
1331 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC);
1332 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
1333 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC);
1334 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
1335 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC);
1336
1337 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1338 // So color spaces will always match.
1339
1340 // Check whether underlying formats of shared streams match.
1341 if (isVideo && isPreview && videoFormat != previewFormat) {
1342 LOGE("Combined video and preview usage flag is not supported");
1343 return -EINVAL;
1344 }
1345 if (isPreview && isZSL && previewFormat != zslFormat) {
1346 LOGE("Combined preview and zsl usage flag is not supported");
1347 return -EINVAL;
1348 }
1349 if (isVideo && isZSL && videoFormat != zslFormat) {
1350 LOGE("Combined video and zsl usage flag is not supported");
1351 return -EINVAL;
1352 }
1353 }
1354 return NO_ERROR;
1355}
1356
1357/*===========================================================================
1358 * FUNCTION : validateUsageFlagsForEis
1359 *
1360 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1361 *
1362 * PARAMETERS :
1363 * @stream_list : streams to be configured
1364 *
1365 * RETURN :
1366 * NO_ERROR if the usage flags are supported
1367 * error code if usage flags are not supported
1368 *
1369 *==========================================================================*/
1370int QCamera3HardwareInterface::validateUsageFlagsForEis(
1371 const camera3_stream_configuration_t* streamList)
1372{
1373 for (size_t j = 0; j < streamList->num_streams; j++) {
1374 const camera3_stream_t *newStream = streamList->streams[j];
1375
1376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378
 1379        // Because EIS is "hard-coded" for certain use cases, and the current
1380 // implementation doesn't support shared preview and video on the same
1381 // stream, return failure if EIS is forced on.
1382 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1383 LOGE("Combined video and preview usage flag is not supported due to EIS");
1384 return -EINVAL;
1385 }
1386 }
1387 return NO_ERROR;
1388}
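/* Illustrative note: with EIS enabled and an EIS-supported size, a stream that
 * sets both the preview and video usage bits is rejected here; splitting such a
 * surface into separate preview and video streams is the assumed client-side
 * remedy (not stated elsewhere in this file). */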
1389
Thierry Strudel3d639192016-09-09 11:52:26 -07001390/*==============================================================================
1391 * FUNCTION : isSupportChannelNeeded
1392 *
 1393 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1394 *
1395 * PARAMETERS :
1396 * @stream_list : streams to be configured
1397 * @stream_config_info : the config info for streams to be configured
1398 *
 1399 * RETURN : Boolean true/false decision
1400 *
1401 *==========================================================================*/
1402bool QCamera3HardwareInterface::isSupportChannelNeeded(
1403 camera3_stream_configuration_t *streamList,
1404 cam_stream_size_info_t stream_config_info)
1405{
1406 uint32_t i;
1407 bool pprocRequested = false;
 1408    /* Check for conditions where the PProc pipeline does not have any streams */
1409 for (i = 0; i < stream_config_info.num_streams; i++) {
1410 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1411 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1412 pprocRequested = true;
1413 break;
1414 }
1415 }
1416
1417 if (pprocRequested == false )
1418 return true;
1419
1420 /* Dummy stream needed if only raw or jpeg streams present */
1421 for (i = 0; i < streamList->num_streams; i++) {
1422 switch(streamList->streams[i]->format) {
1423 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1424 case HAL_PIXEL_FORMAT_RAW10:
1425 case HAL_PIXEL_FORMAT_RAW16:
1426 case HAL_PIXEL_FORMAT_BLOB:
1427 break;
1428 default:
1429 return false;
1430 }
1431 }
1432 return true;
1433}
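/* Illustrative note: a configuration containing only RAW and/or BLOB streams
 * (e.g. one RAW16 plus one JPEG stream) returns true, so a dummy support channel
 * keeps the post-processing pipeline populated; once post-processing is requested
 * on some stream, any non-RAW/non-BLOB stream in the list makes this return false. */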
1434
1435/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001436 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001437 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001438 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001439 *
1440 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001441 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001442 *
1443 * RETURN : int32_t type of status
1444 * NO_ERROR -- success
 1445 *              non-zero failure code
1446 *
1447 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001448int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001449{
1450 int32_t rc = NO_ERROR;
1451
1452 cam_dimension_t max_dim = {0, 0};
1453 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1454 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1455 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1456 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1457 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1458 }
1459
1460 clear_metadata_buffer(mParameters);
1461
1462 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1463 max_dim);
1464 if (rc != NO_ERROR) {
1465 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1466 return rc;
1467 }
1468
1469 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1470 if (rc != NO_ERROR) {
1471 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1472 return rc;
1473 }
1474
1475 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001476 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001477
1478 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1479 mParameters);
1480 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001481 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001482 return rc;
1483 }
1484
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001485 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001486 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1487 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1488 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1489 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1490 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001491
1492 return rc;
1493}
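/* Usage sketch (illustrative only, consumer code assumed): query the selected
 * sensor mode once the maximum stream dimensions have been pushed down, then
 * derive buffer sizing or timing from the reported fields.
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // e.g. use modeInfo.op_pixel_clk and modeInfo.num_raw_bits to size RAW
 *       // buffers or to estimate per-frame readout time.
 *   }
 */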
1494
1495/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001496 * FUNCTION : addToPPFeatureMask
1497 *
1498 * DESCRIPTION: add additional features to pp feature mask based on
1499 * stream type and usecase
1500 *
1501 * PARAMETERS :
1502 * @stream_format : stream type for feature mask
1503 * @stream_idx : stream idx within postprocess_mask list to change
1504 *
 1505 * RETURN     : None
1506 *
1507 *==========================================================================*/
1508void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1509 uint32_t stream_idx)
1510{
1511 char feature_mask_value[PROPERTY_VALUE_MAX];
1512 cam_feature_mask_t feature_mask;
1513 int args_converted;
1514 int property_len;
1515
1516 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001517#ifdef _LE_CAMERA_
1518 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1519 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1520 property_len = property_get("persist.camera.hal3.feature",
1521 feature_mask_value, swtnr_feature_mask_value);
1522#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001523 property_len = property_get("persist.camera.hal3.feature",
1524 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001525#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001526 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1527 (feature_mask_value[1] == 'x')) {
1528 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1529 } else {
1530 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1531 }
1532 if (1 != args_converted) {
1533 feature_mask = 0;
1534 LOGE("Wrong feature mask %s", feature_mask_value);
1535 return;
1536 }
1537
1538 switch (stream_format) {
1539 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1540 /* Add LLVD to pp feature mask only if video hint is enabled */
1541 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1542 mStreamConfigInfo.postprocess_mask[stream_idx]
1543 |= CAM_QTI_FEATURE_SW_TNR;
1544 LOGH("Added SW TNR to pp feature mask");
1545 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1546 mStreamConfigInfo.postprocess_mask[stream_idx]
1547 |= CAM_QCOM_FEATURE_LLVD;
1548 LOGH("Added LLVD SeeMore to pp feature mask");
1549 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001550 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1551 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1552 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1553 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001554 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1555 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1556 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1557 CAM_QTI_FEATURE_BINNING_CORRECTION;
1558 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001559 break;
1560 }
1561 default:
1562 break;
1563 }
1564 LOGD("PP feature mask %llx",
1565 mStreamConfigInfo.postprocess_mask[stream_idx]);
1566}
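/* Illustrative note: the override mask is read from persist.camera.hal3.feature
 * and parsed as hex ("0x...") or decimal, matching the sscanf formats above.
 * For example (bit value assumed, not taken from this file):
 *
 *   adb shell setprop persist.camera.hal3.feature 0x100
 *
 * only takes effect for a video stream, and only for the SW TNR / LLVD bits
 * tested above; the staggered-HDR and binning-correction bits come from the
 * sensor capability mask instead. */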
1567
1568/*==============================================================================
1569 * FUNCTION : updateFpsInPreviewBuffer
1570 *
1571 * DESCRIPTION: update FPS information in preview buffer.
1572 *
1573 * PARAMETERS :
1574 * @metadata : pointer to metadata buffer
1575 * @frame_number: frame_number to look for in pending buffer list
1576 *
1577 * RETURN : None
1578 *
1579 *==========================================================================*/
1580void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1581 uint32_t frame_number)
1582{
1583 // Mark all pending buffers for this particular request
1584 // with corresponding framerate information
1585 for (List<PendingBuffersInRequest>::iterator req =
1586 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1587 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1588 for(List<PendingBufferInfo>::iterator j =
1589 req->mPendingBufferList.begin();
1590 j != req->mPendingBufferList.end(); j++) {
1591 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1592 if ((req->frame_number == frame_number) &&
1593 (channel->getStreamTypeMask() &
1594 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1595 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1596 CAM_INTF_PARM_FPS_RANGE, metadata) {
1597 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1598 struct private_handle_t *priv_handle =
1599 (struct private_handle_t *)(*(j->buffer));
1600 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1601 }
1602 }
1603 }
1604 }
1605}
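/* Descriptive note: UPDATE_REFRESH_RATE is display (qdMetaData) metadata on the
 * gralloc private handle, so the max FPS written here is presumably consumed by
 * the display pipeline to align panel refresh with the camera frame rate
 * (assumption based on the key name). */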
1606
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001607/*==============================================================================
1608 * FUNCTION : updateTimeStampInPendingBuffers
1609 *
1610 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1611 * of a frame number
1612 *
1613 * PARAMETERS :
1614 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1615 * @timestamp : timestamp to be set
1616 *
1617 * RETURN : None
1618 *
1619 *==========================================================================*/
1620void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1621 uint32_t frameNumber, nsecs_t timestamp)
1622{
1623 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1624 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1625 if (req->frame_number != frameNumber)
1626 continue;
1627
1628 for (auto k = req->mPendingBufferList.begin();
1629 k != req->mPendingBufferList.end(); k++ ) {
1630 struct private_handle_t *priv_handle =
1631 (struct private_handle_t *) (*(k->buffer));
1632 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1633 }
1634 }
1635 return;
1636}
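/* Descriptive note: SET_VT_TIMESTAMP stores the timestamp in the buffer's private
 * handle metadata; the "VT" key suggests it is consumed by video-telephony clients
 * downstream (assumption based on the key name). */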
1637
Thierry Strudel3d639192016-09-09 11:52:26 -07001638/*===========================================================================
1639 * FUNCTION : configureStreams
1640 *
1641 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1642 * and output streams.
1643 *
1644 * PARAMETERS :
1645 * @stream_list : streams to be configured
1646 *
 1647 * RETURN     : int32_t type of status
1648 *
1649 *==========================================================================*/
1650int QCamera3HardwareInterface::configureStreams(
1651 camera3_stream_configuration_t *streamList)
1652{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001653 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001654 int rc = 0;
1655
1656 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001657 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001658 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001659 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001660
1661 return rc;
1662}
1663
1664/*===========================================================================
1665 * FUNCTION : configureStreamsPerfLocked
1666 *
1667 * DESCRIPTION: configureStreams while perfLock is held.
1668 *
1669 * PARAMETERS :
1670 * @stream_list : streams to be configured
1671 *
1672 * RETURN : int32_t type of status
1673 * NO_ERROR -- success
 1674 *              non-zero failure code
1675 *==========================================================================*/
1676int QCamera3HardwareInterface::configureStreamsPerfLocked(
1677 camera3_stream_configuration_t *streamList)
1678{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001679 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001680 int rc = 0;
1681
1682 // Sanity check stream_list
1683 if (streamList == NULL) {
1684 LOGE("NULL stream configuration");
1685 return BAD_VALUE;
1686 }
1687 if (streamList->streams == NULL) {
1688 LOGE("NULL stream list");
1689 return BAD_VALUE;
1690 }
1691
1692 if (streamList->num_streams < 1) {
1693 LOGE("Bad number of streams requested: %d",
1694 streamList->num_streams);
1695 return BAD_VALUE;
1696 }
1697
1698 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1699 LOGE("Maximum number of streams %d exceeded: %d",
1700 MAX_NUM_STREAMS, streamList->num_streams);
1701 return BAD_VALUE;
1702 }
1703
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001704 rc = validateUsageFlags(streamList);
1705 if (rc != NO_ERROR) {
1706 return rc;
1707 }
1708
Thierry Strudel3d639192016-09-09 11:52:26 -07001709 mOpMode = streamList->operation_mode;
1710 LOGD("mOpMode: %d", mOpMode);
1711
 1712    /* first invalidate all the streams in the mStreamList
1713 * if they appear again, they will be validated */
1714 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1715 it != mStreamInfo.end(); it++) {
1716 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1717 if (channel) {
1718 channel->stop();
1719 }
1720 (*it)->status = INVALID;
1721 }
1722
1723 if (mRawDumpChannel) {
1724 mRawDumpChannel->stop();
1725 delete mRawDumpChannel;
1726 mRawDumpChannel = NULL;
1727 }
1728
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001729 if (mHdrPlusRawSrcChannel) {
1730 mHdrPlusRawSrcChannel->stop();
1731 delete mHdrPlusRawSrcChannel;
1732 mHdrPlusRawSrcChannel = NULL;
1733 }
1734
Thierry Strudel3d639192016-09-09 11:52:26 -07001735 if (mSupportChannel)
1736 mSupportChannel->stop();
1737
1738 if (mAnalysisChannel) {
1739 mAnalysisChannel->stop();
1740 }
1741 if (mMetadataChannel) {
 1742        /* If mStreamInfo is not empty, there is a metadata stream */
1743 mMetadataChannel->stop();
1744 }
1745 if (mChannelHandle) {
1746 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1747 mChannelHandle);
1748 LOGD("stopping channel %d", mChannelHandle);
1749 }
1750
1751 pthread_mutex_lock(&mMutex);
1752
1753 // Check state
1754 switch (mState) {
1755 case INITIALIZED:
1756 case CONFIGURED:
1757 case STARTED:
1758 /* valid state */
1759 break;
1760 default:
1761 LOGE("Invalid state %d", mState);
1762 pthread_mutex_unlock(&mMutex);
1763 return -ENODEV;
1764 }
1765
1766 /* Check whether we have video stream */
1767 m_bIs4KVideo = false;
1768 m_bIsVideo = false;
1769 m_bEisSupportedSize = false;
1770 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001771 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001772 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001773 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001774 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001775 uint32_t videoWidth = 0U;
1776 uint32_t videoHeight = 0U;
1777 size_t rawStreamCnt = 0;
1778 size_t stallStreamCnt = 0;
1779 size_t processedStreamCnt = 0;
1780 // Number of streams on ISP encoder path
1781 size_t numStreamsOnEncoder = 0;
1782 size_t numYuv888OnEncoder = 0;
1783 bool bYuv888OverrideJpeg = false;
1784 cam_dimension_t largeYuv888Size = {0, 0};
1785 cam_dimension_t maxViewfinderSize = {0, 0};
1786 bool bJpegExceeds4K = false;
1787 bool bJpegOnEncoder = false;
1788 bool bUseCommonFeatureMask = false;
1789 cam_feature_mask_t commonFeatureMask = 0;
1790 bool bSmallJpegSize = false;
1791 uint32_t width_ratio;
1792 uint32_t height_ratio;
1793 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1794 camera3_stream_t *inputStream = NULL;
1795 bool isJpeg = false;
1796 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001797 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001798 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001799
1800 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1801
1802 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001803 uint8_t eis_prop_set;
1804 uint32_t maxEisWidth = 0;
1805 uint32_t maxEisHeight = 0;
1806
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001807 // Initialize all instant AEC related variables
1808 mInstantAEC = false;
1809 mResetInstantAEC = false;
1810 mInstantAECSettledFrameNumber = 0;
1811 mAecSkipDisplayFrameBound = 0;
1812 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001813 mCurrFeatureState = 0;
1814 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001815
Thierry Strudel3d639192016-09-09 11:52:26 -07001816 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1817
1818 size_t count = IS_TYPE_MAX;
1819 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1820 for (size_t i = 0; i < count; i++) {
1821 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1823 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001824 break;
1825 }
1826 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001828 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001829 maxEisWidth = MAX_EIS_WIDTH;
1830 maxEisHeight = MAX_EIS_HEIGHT;
1831 }
1832
1833 /* EIS setprop control */
1834 char eis_prop[PROPERTY_VALUE_MAX];
1835 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001836 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001837 eis_prop_set = (uint8_t)atoi(eis_prop);
1838
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001839 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001840 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1841
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001842 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1843 m_bEisEnable, eis_prop_set, m_bEisSupported);
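    // Illustrative note: with the default value "1" for persist.camera.eis.enable,
    // EIS simply follows sensor capability; setting the property to "0" (e.g. via
    // adb setprop) force-disables it even when IS_TYPE_EIS_2_0/3_0 is advertised,
    // and constrained high-speed mode always disables it per the condition above.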
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001844
Thierry Strudel3d639192016-09-09 11:52:26 -07001845 /* stream configurations */
1846 for (size_t i = 0; i < streamList->num_streams; i++) {
1847 camera3_stream_t *newStream = streamList->streams[i];
1848 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1849 "height = %d, rotation = %d, usage = 0x%x",
1850 i, newStream->stream_type, newStream->format,
1851 newStream->width, newStream->height, newStream->rotation,
1852 newStream->usage);
1853 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1854 newStream->stream_type == CAMERA3_STREAM_INPUT){
1855 isZsl = true;
1856 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001857 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1858 IS_USAGE_PREVIEW(newStream->usage)) {
1859 isPreview = true;
1860 }
1861
Thierry Strudel3d639192016-09-09 11:52:26 -07001862 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1863 inputStream = newStream;
1864 }
1865
Emilian Peev7650c122017-01-19 08:24:33 -08001866 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1867 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001868 isJpeg = true;
1869 jpegSize.width = newStream->width;
1870 jpegSize.height = newStream->height;
1871 if (newStream->width > VIDEO_4K_WIDTH ||
1872 newStream->height > VIDEO_4K_HEIGHT)
1873 bJpegExceeds4K = true;
1874 }
1875
1876 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1877 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1878 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001879 // In HAL3 we can have multiple different video streams.
1880 // The variables video width and height are used below as
1881 // dimensions of the biggest of them
1882 if (videoWidth < newStream->width ||
1883 videoHeight < newStream->height) {
1884 videoWidth = newStream->width;
1885 videoHeight = newStream->height;
1886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001887 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1888 (VIDEO_4K_HEIGHT <= newStream->height)) {
1889 m_bIs4KVideo = true;
1890 }
1891 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1892 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001893
Thierry Strudel3d639192016-09-09 11:52:26 -07001894 }
1895 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1896 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1897 switch (newStream->format) {
1898 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08001899 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
1900 depthPresent = true;
1901 break;
1902 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 stallStreamCnt++;
1904 if (isOnEncoder(maxViewfinderSize, newStream->width,
1905 newStream->height)) {
1906 numStreamsOnEncoder++;
1907 bJpegOnEncoder = true;
1908 }
1909 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1910 newStream->width);
1911 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
 1912                    newStream->height);
1913 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1914 "FATAL: max_downscale_factor cannot be zero and so assert");
1915 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1916 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1917 LOGH("Setting small jpeg size flag to true");
1918 bSmallJpegSize = true;
1919 }
1920 break;
1921 case HAL_PIXEL_FORMAT_RAW10:
1922 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1923 case HAL_PIXEL_FORMAT_RAW16:
1924 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001925 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1926 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
1927 pdStatCount++;
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929 break;
1930 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1931 processedStreamCnt++;
1932 if (isOnEncoder(maxViewfinderSize, newStream->width,
1933 newStream->height)) {
1934 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1935 !IS_USAGE_ZSL(newStream->usage)) {
1936 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1937 }
1938 numStreamsOnEncoder++;
1939 }
1940 break;
1941 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1942 processedStreamCnt++;
1943 if (isOnEncoder(maxViewfinderSize, newStream->width,
1944 newStream->height)) {
1945 // If Yuv888 size is not greater than 4K, set feature mask
1946 // to SUPERSET so that it support concurrent request on
1947 // YUV and JPEG.
1948 if (newStream->width <= VIDEO_4K_WIDTH &&
1949 newStream->height <= VIDEO_4K_HEIGHT) {
1950 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1951 }
1952 numStreamsOnEncoder++;
1953 numYuv888OnEncoder++;
1954 largeYuv888Size.width = newStream->width;
1955 largeYuv888Size.height = newStream->height;
1956 }
1957 break;
1958 default:
1959 processedStreamCnt++;
1960 if (isOnEncoder(maxViewfinderSize, newStream->width,
1961 newStream->height)) {
1962 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1963 numStreamsOnEncoder++;
1964 }
1965 break;
1966 }
1967
1968 }
1969 }
1970
1971 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1972 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1973 !m_bIsVideo) {
1974 m_bEisEnable = false;
1975 }
1976
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001977 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
1978 pthread_mutex_unlock(&mMutex);
1979 return -EINVAL;
1980 }
1981
Thierry Strudel54dc9782017-02-15 12:12:10 -08001982 uint8_t forceEnableTnr = 0;
1983 char tnr_prop[PROPERTY_VALUE_MAX];
1984 memset(tnr_prop, 0, sizeof(tnr_prop));
1985 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
1986 forceEnableTnr = (uint8_t)atoi(tnr_prop);
1987
Thierry Strudel3d639192016-09-09 11:52:26 -07001988 /* Logic to enable/disable TNR based on specific config size/etc.*/
1989 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1990 ((videoWidth == 1920 && videoHeight == 1080) ||
1991 (videoWidth == 1280 && videoHeight == 720)) &&
1992 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1993 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001994 else if (forceEnableTnr)
1995 m_bTnrEnabled = true;
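    // Illustrative note: per the conditions above, TNR is auto-enabled only when
    // TNR is allowed for preview or video, the video size is exactly 1080p or 720p,
    // and the session is not constrained high-speed; setting
    // debug.camera.tnr.forceenable to "1" bypasses those checks entirely.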
Thierry Strudel3d639192016-09-09 11:52:26 -07001996
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001997 char videoHdrProp[PROPERTY_VALUE_MAX];
1998 memset(videoHdrProp, 0, sizeof(videoHdrProp));
1999 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2000 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2001
2002 if (hdr_mode_prop == 1 && m_bIsVideo &&
2003 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2004 m_bVideoHdrEnabled = true;
2005 else
2006 m_bVideoHdrEnabled = false;
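    // Illustrative note: persist.camera.hdr.video set to "1" enables video HDR only
    // when a video stream is configured and the session is not constrained
    // high-speed; whether the sensor actually supports video HDR is presumably
    // validated elsewhere (assumption, not shown in this function).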
2007
2008
Thierry Strudel3d639192016-09-09 11:52:26 -07002009 /* Check if num_streams is sane */
2010 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2011 rawStreamCnt > MAX_RAW_STREAMS ||
2012 processedStreamCnt > MAX_PROCESSED_STREAMS) {
 2013        LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2014 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2015 pthread_mutex_unlock(&mMutex);
2016 return -EINVAL;
2017 }
2018 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002019 if (isZsl && m_bIs4KVideo) {
2020 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002021 pthread_mutex_unlock(&mMutex);
2022 return -EINVAL;
2023 }
2024 /* Check if stream sizes are sane */
2025 if (numStreamsOnEncoder > 2) {
2026 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2027 pthread_mutex_unlock(&mMutex);
2028 return -EINVAL;
2029 } else if (1 < numStreamsOnEncoder){
2030 bUseCommonFeatureMask = true;
2031 LOGH("Multiple streams above max viewfinder size, common mask needed");
2032 }
2033
2034 /* Check if BLOB size is greater than 4k in 4k recording case */
2035 if (m_bIs4KVideo && bJpegExceeds4K) {
2036 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2037 pthread_mutex_unlock(&mMutex);
2038 return -EINVAL;
2039 }
2040
Emilian Peev7650c122017-01-19 08:24:33 -08002041 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2042 depthPresent) {
2043 LOGE("HAL doesn't support depth streams in HFR mode!");
2044 pthread_mutex_unlock(&mMutex);
2045 return -EINVAL;
2046 }
2047
Thierry Strudel3d639192016-09-09 11:52:26 -07002048 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2049 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2050 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2051 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2052 // configurations:
2053 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2054 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2055 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2056 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2057 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2058 __func__);
2059 pthread_mutex_unlock(&mMutex);
2060 return -EINVAL;
2061 }
2062
2063 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2064 // the YUV stream's size is greater or equal to the JPEG size, set common
2065 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2066 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2067 jpegSize.width, jpegSize.height) &&
2068 largeYuv888Size.width > jpegSize.width &&
2069 largeYuv888Size.height > jpegSize.height) {
2070 bYuv888OverrideJpeg = true;
2071 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2072 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2073 }
2074
2075 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2076 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2077 commonFeatureMask);
2078 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2079 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2080
2081 rc = validateStreamDimensions(streamList);
2082 if (rc == NO_ERROR) {
2083 rc = validateStreamRotations(streamList);
2084 }
2085 if (rc != NO_ERROR) {
2086 LOGE("Invalid stream configuration requested!");
2087 pthread_mutex_unlock(&mMutex);
2088 return rc;
2089 }
2090
Emilian Peev0f3c3162017-03-15 12:57:46 +00002091 if (1 < pdStatCount) {
2092 LOGE("HAL doesn't support multiple PD streams");
2093 pthread_mutex_unlock(&mMutex);
2094 return -EINVAL;
2095 }
2096
2097 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2098 (1 == pdStatCount)) {
2099 LOGE("HAL doesn't support PD streams in HFR mode!");
2100 pthread_mutex_unlock(&mMutex);
2101 return -EINVAL;
2102 }
2103
Thierry Strudel3d639192016-09-09 11:52:26 -07002104 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2105 for (size_t i = 0; i < streamList->num_streams; i++) {
2106 camera3_stream_t *newStream = streamList->streams[i];
2107 LOGH("newStream type = %d, stream format = %d "
2108 "stream size : %d x %d, stream rotation = %d",
2109 newStream->stream_type, newStream->format,
2110 newStream->width, newStream->height, newStream->rotation);
2111 //if the stream is in the mStreamList validate it
2112 bool stream_exists = false;
2113 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2114 it != mStreamInfo.end(); it++) {
2115 if ((*it)->stream == newStream) {
2116 QCamera3ProcessingChannel *channel =
2117 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2118 stream_exists = true;
2119 if (channel)
2120 delete channel;
2121 (*it)->status = VALID;
2122 (*it)->stream->priv = NULL;
2123 (*it)->channel = NULL;
2124 }
2125 }
2126 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2127 //new stream
2128 stream_info_t* stream_info;
2129 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2130 if (!stream_info) {
2131 LOGE("Could not allocate stream info");
2132 rc = -ENOMEM;
2133 pthread_mutex_unlock(&mMutex);
2134 return rc;
2135 }
2136 stream_info->stream = newStream;
2137 stream_info->status = VALID;
2138 stream_info->channel = NULL;
2139 mStreamInfo.push_back(stream_info);
2140 }
2141 /* Covers Opaque ZSL and API1 F/W ZSL */
2142 if (IS_USAGE_ZSL(newStream->usage)
2143 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2144 if (zslStream != NULL) {
2145 LOGE("Multiple input/reprocess streams requested!");
2146 pthread_mutex_unlock(&mMutex);
2147 return BAD_VALUE;
2148 }
2149 zslStream = newStream;
2150 }
2151 /* Covers YUV reprocess */
2152 if (inputStream != NULL) {
2153 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2154 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2155 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2156 && inputStream->width == newStream->width
2157 && inputStream->height == newStream->height) {
2158 if (zslStream != NULL) {
 2159                /* This scenario indicates that multiple YUV streams with the same
 2160                 * size as the input stream have been requested. Since the zsl stream
 2161                 * handle is solely used to override the size of streams which share
 2162                 * h/w streams, we simply make a guess here as to which of the streams
 2163                 * is the ZSL stream; this will be refactored once we have generic
 2164                 * logic for streams sharing encoder output
2165 */
2166 LOGH("Warning, Multiple ip/reprocess streams requested!");
2167 }
2168 zslStream = newStream;
2169 }
2170 }
2171 }
2172
2173 /* If a zsl stream is set, we know that we have configured at least one input or
2174 bidirectional stream */
2175 if (NULL != zslStream) {
2176 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2177 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2178 mInputStreamInfo.format = zslStream->format;
2179 mInputStreamInfo.usage = zslStream->usage;
2180 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2181 mInputStreamInfo.dim.width,
2182 mInputStreamInfo.dim.height,
2183 mInputStreamInfo.format, mInputStreamInfo.usage);
2184 }
2185
2186 cleanAndSortStreamInfo();
2187 if (mMetadataChannel) {
2188 delete mMetadataChannel;
2189 mMetadataChannel = NULL;
2190 }
2191 if (mSupportChannel) {
2192 delete mSupportChannel;
2193 mSupportChannel = NULL;
2194 }
2195
2196 if (mAnalysisChannel) {
2197 delete mAnalysisChannel;
2198 mAnalysisChannel = NULL;
2199 }
2200
2201 if (mDummyBatchChannel) {
2202 delete mDummyBatchChannel;
2203 mDummyBatchChannel = NULL;
2204 }
2205
Emilian Peev7650c122017-01-19 08:24:33 -08002206 if (mDepthChannel) {
2207 mDepthChannel = NULL;
2208 }
2209
Thierry Strudel2896d122017-02-23 19:18:03 -08002210 char is_type_value[PROPERTY_VALUE_MAX];
2211 property_get("persist.camera.is_type", is_type_value, "4");
2212 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
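    // Illustrative note: persist.camera.is_type selects the image-stabilization
    // type; m_bEis3PropertyEnabled becomes true only when the value equals
    // IS_TYPE_EIS_3_0 (the default string "4" is assumed to map to that enum value).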
2213
Thierry Strudel3d639192016-09-09 11:52:26 -07002214 //Create metadata channel and initialize it
2215 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2216 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2217 gCamCapability[mCameraId]->color_arrangement);
2218 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2219 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002220 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002221 if (mMetadataChannel == NULL) {
2222 LOGE("failed to allocate metadata channel");
2223 rc = -ENOMEM;
2224 pthread_mutex_unlock(&mMutex);
2225 return rc;
2226 }
2227 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2228 if (rc < 0) {
2229 LOGE("metadata channel initialization failed");
2230 delete mMetadataChannel;
2231 mMetadataChannel = NULL;
2232 pthread_mutex_unlock(&mMutex);
2233 return rc;
2234 }
2235
Thierry Strudel2896d122017-02-23 19:18:03 -08002236 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002237 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002238 bool onlyRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002239 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2240 /* Allocate channel objects for the requested streams */
2241 for (size_t i = 0; i < streamList->num_streams; i++) {
2242 camera3_stream_t *newStream = streamList->streams[i];
2243 uint32_t stream_usage = newStream->usage;
2244 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2245 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2246 struct camera_info *p_info = NULL;
2247 pthread_mutex_lock(&gCamLock);
2248 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2249 pthread_mutex_unlock(&gCamLock);
2250 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2251 || IS_USAGE_ZSL(newStream->usage)) &&
2252 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002253 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002254 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002255 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2256 if (bUseCommonFeatureMask)
2257 zsl_ppmask = commonFeatureMask;
2258 else
2259 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002260 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002261 if (numStreamsOnEncoder > 0)
2262 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2263 else
2264 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002265 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002266 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002267 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002268 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002269 LOGH("Input stream configured, reprocess config");
2270 } else {
2271 //for non zsl streams find out the format
2272 switch (newStream->format) {
2273 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2274 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002275 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002276 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2277 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2278 /* add additional features to pp feature mask */
2279 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2280 mStreamConfigInfo.num_streams);
2281
2282 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2284 CAM_STREAM_TYPE_VIDEO;
2285 if (m_bTnrEnabled && m_bTnrVideo) {
2286 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2287 CAM_QCOM_FEATURE_CPP_TNR;
2288 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2289 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2290 ~CAM_QCOM_FEATURE_CDS;
2291 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002292 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2293 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2294 CAM_QTI_FEATURE_PPEISCORE;
2295 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002296 } else {
2297 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2298 CAM_STREAM_TYPE_PREVIEW;
2299 if (m_bTnrEnabled && m_bTnrPreview) {
2300 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2301 CAM_QCOM_FEATURE_CPP_TNR;
2302 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2303 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2304 ~CAM_QCOM_FEATURE_CDS;
2305 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002306 if(!m_bSwTnrPreview) {
2307 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2308 ~CAM_QTI_FEATURE_SW_TNR;
2309 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002310 padding_info.width_padding = mSurfaceStridePadding;
2311 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002312 previewSize.width = (int32_t)newStream->width;
2313 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002314 }
2315 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2316 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2317 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2318 newStream->height;
2319 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2320 newStream->width;
2321 }
2322 }
2323 break;
2324 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002325 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002326 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2327 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2328 if (bUseCommonFeatureMask)
2329 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2330 commonFeatureMask;
2331 else
2332 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2333 CAM_QCOM_FEATURE_NONE;
2334 } else {
2335 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2336 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2337 }
2338 break;
2339 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002340 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002341 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2342 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2343 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2344 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2345 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002346 /* Remove rotation if it is not supported
2347 for 4K LiveVideo snapshot case (online processing) */
2348 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2349 CAM_QCOM_FEATURE_ROTATION)) {
2350 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2351 &= ~CAM_QCOM_FEATURE_ROTATION;
2352 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002353 } else {
2354 if (bUseCommonFeatureMask &&
2355 isOnEncoder(maxViewfinderSize, newStream->width,
2356 newStream->height)) {
2357 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2358 } else {
2359 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2360 }
2361 }
2362 if (isZsl) {
2363 if (zslStream) {
2364 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2365 (int32_t)zslStream->width;
2366 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2367 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002368 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2369 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002370 } else {
2371 LOGE("Error, No ZSL stream identified");
2372 pthread_mutex_unlock(&mMutex);
2373 return -EINVAL;
2374 }
2375 } else if (m_bIs4KVideo) {
2376 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2377 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2378 } else if (bYuv888OverrideJpeg) {
2379 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2380 (int32_t)largeYuv888Size.width;
2381 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2382 (int32_t)largeYuv888Size.height;
2383 }
2384 break;
2385 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2386 case HAL_PIXEL_FORMAT_RAW16:
2387 case HAL_PIXEL_FORMAT_RAW10:
2388 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2389 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2390 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002391 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2392 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2393 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2394 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2395 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2396 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2397 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2398 gCamCapability[mCameraId]->dt[mPDIndex];
2399 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2400 gCamCapability[mCameraId]->vc[mPDIndex];
2401 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002402 break;
2403 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002404 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002405 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2406 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2407 break;
2408 }
2409 }
2410
2411 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2412 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2413 gCamCapability[mCameraId]->color_arrangement);
2414
2415 if (newStream->priv == NULL) {
2416 //New stream, construct channel
2417 switch (newStream->stream_type) {
2418 case CAMERA3_STREAM_INPUT:
2419 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2420 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2421 break;
2422 case CAMERA3_STREAM_BIDIRECTIONAL:
2423 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2424 GRALLOC_USAGE_HW_CAMERA_WRITE;
2425 break;
2426 case CAMERA3_STREAM_OUTPUT:
 2427                    /* For video encoding streams, set the read/write rarely
 2428                     * flags so that the buffers may be allocated un-cached */
2429 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2430 newStream->usage |=
2431 (GRALLOC_USAGE_SW_READ_RARELY |
2432 GRALLOC_USAGE_SW_WRITE_RARELY |
2433 GRALLOC_USAGE_HW_CAMERA_WRITE);
2434 else if (IS_USAGE_ZSL(newStream->usage))
2435 {
2436 LOGD("ZSL usage flag skipping");
2437 }
2438 else if (newStream == zslStream
2439 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2440 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2441 } else
2442 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2443 break;
2444 default:
2445 LOGE("Invalid stream_type %d", newStream->stream_type);
2446 break;
2447 }
2448
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002449 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002450 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2451 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2452 QCamera3ProcessingChannel *channel = NULL;
2453 switch (newStream->format) {
2454 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2455 if ((newStream->usage &
2456 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2457 (streamList->operation_mode ==
2458 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2459 ) {
2460 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2461 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002462 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002463 this,
2464 newStream,
2465 (cam_stream_type_t)
2466 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2467 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2468 mMetadataChannel,
2469 0); //heap buffers are not required for HFR video channel
2470 if (channel == NULL) {
2471 LOGE("allocation of channel failed");
2472 pthread_mutex_unlock(&mMutex);
2473 return -ENOMEM;
2474 }
2475 //channel->getNumBuffers() will return 0 here so use
 2476                    //MAX_INFLIGHT_HFR_REQUESTS
2477 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2478 newStream->priv = channel;
2479 LOGI("num video buffers in HFR mode: %d",
2480 MAX_INFLIGHT_HFR_REQUESTS);
2481 } else {
2482 /* Copy stream contents in HFR preview only case to create
2483 * dummy batch channel so that sensor streaming is in
2484 * HFR mode */
2485 if (!m_bIsVideo && (streamList->operation_mode ==
2486 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2487 mDummyBatchStream = *newStream;
2488 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002489 int bufferCount = MAX_INFLIGHT_REQUESTS;
2490 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2491 CAM_STREAM_TYPE_VIDEO) {
2492 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */)
2493 bufferCount = MAX_VIDEO_BUFFERS;
2494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2496 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002497 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002498 this,
2499 newStream,
2500 (cam_stream_type_t)
2501 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2502 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2503 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002504 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002505 if (channel == NULL) {
2506 LOGE("allocation of channel failed");
2507 pthread_mutex_unlock(&mMutex);
2508 return -ENOMEM;
2509 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002510 /* disable UBWC for preview, though supported,
2511 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002512 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002513 (previewSize.width == (int32_t)videoWidth)&&
2514 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002515 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002516 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002517 channel->setUBWCEnabled(forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002518 newStream->max_buffers = channel->getNumBuffers();
2519 newStream->priv = channel;
2520 }
2521 break;
2522 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2523 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2524 mChannelHandle,
2525 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002526 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002527 this,
2528 newStream,
2529 (cam_stream_type_t)
2530 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2531 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2532 mMetadataChannel);
2533 if (channel == NULL) {
2534 LOGE("allocation of YUV channel failed");
2535 pthread_mutex_unlock(&mMutex);
2536 return -ENOMEM;
2537 }
2538 newStream->max_buffers = channel->getNumBuffers();
2539 newStream->priv = channel;
2540 break;
2541 }
2542 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2543 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002544 case HAL_PIXEL_FORMAT_RAW10: {
2545 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2546 (HAL_DATASPACE_DEPTH != newStream->data_space))
2547 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002548 mRawChannel = new QCamera3RawChannel(
2549 mCameraHandle->camera_handle, mChannelHandle,
2550 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002551 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002552 this, newStream,
2553 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002554 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002555 if (mRawChannel == NULL) {
2556 LOGE("allocation of raw channel failed");
2557 pthread_mutex_unlock(&mMutex);
2558 return -ENOMEM;
2559 }
2560 newStream->max_buffers = mRawChannel->getNumBuffers();
2561 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2562 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002563 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002564 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002565 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2566 mDepthChannel = new QCamera3DepthChannel(
2567 mCameraHandle->camera_handle, mChannelHandle,
2568 mCameraHandle->ops, NULL, NULL, &padding_info,
2569 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2570 mMetadataChannel);
2571 if (NULL == mDepthChannel) {
2572 LOGE("Allocation of depth channel failed");
2573 pthread_mutex_unlock(&mMutex);
2574 return NO_MEMORY;
2575 }
2576 newStream->priv = mDepthChannel;
2577 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2578 } else {
2579 // Max live snapshot inflight buffer is 1. This is to mitigate
2580 // frame drop issues for video snapshot. The more buffers being
2581 // allocated, the more frame drops there are.
2582 mPictureChannel = new QCamera3PicChannel(
2583 mCameraHandle->camera_handle, mChannelHandle,
2584 mCameraHandle->ops, captureResultCb,
2585 setBufferErrorStatus, &padding_info, this, newStream,
2586 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2587 m_bIs4KVideo, isZsl, mMetadataChannel,
2588 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2589 if (mPictureChannel == NULL) {
2590 LOGE("allocation of channel failed");
2591 pthread_mutex_unlock(&mMutex);
2592 return -ENOMEM;
2593 }
2594 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2595 newStream->max_buffers = mPictureChannel->getNumBuffers();
2596 mPictureChannel->overrideYuvSize(
2597 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2598 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002599 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002600 break;
2601
2602 default:
2603 LOGE("not a supported format 0x%x", newStream->format);
2604 break;
2605 }
2606 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2607 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2608 } else {
2609 LOGE("Error, Unknown stream type");
2610 pthread_mutex_unlock(&mMutex);
2611 return -EINVAL;
2612 }
2613
2614 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002615 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
2616 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002617 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 newStream->width, newStream->height, forcePreviewUBWC);
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2620 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2621 }
2622 }
2623
2624 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2625 it != mStreamInfo.end(); it++) {
2626 if ((*it)->stream == newStream) {
2627 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2628 break;
2629 }
2630 }
2631 } else {
2632 // Channel already exists for this stream
2633 // Do nothing for now
2634 }
2635 padding_info = gCamCapability[mCameraId]->padding_info;
2636
Emilian Peev7650c122017-01-19 08:24:33 -08002637        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002638         * since there is no real stream associated with them
2639 */
Emilian Peev7650c122017-01-19 08:24:33 -08002640 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002641 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2642 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002643 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002644 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002645 }
2646
Thierry Strudel2896d122017-02-23 19:18:03 -08002647 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2648 onlyRaw = false;
2649 }
2650
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002651 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002652 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002653 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002654 cam_analysis_info_t analysisInfo;
2655 int32_t ret = NO_ERROR;
2656 ret = mCommon.getAnalysisInfo(
2657 FALSE,
2658 analysisFeatureMask,
2659 &analysisInfo);
2660 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002661 cam_color_filter_arrangement_t analysis_color_arrangement =
2662 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2663 CAM_FILTER_ARRANGEMENT_Y :
2664 gCamCapability[mCameraId]->color_arrangement);
2665 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2666 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002667 cam_dimension_t analysisDim;
2668 analysisDim = mCommon.getMatchingDimension(previewSize,
2669 analysisInfo.analysis_recommended_res);
2670
2671 mAnalysisChannel = new QCamera3SupportChannel(
2672 mCameraHandle->camera_handle,
2673 mChannelHandle,
2674 mCameraHandle->ops,
2675 &analysisInfo.analysis_padding_info,
2676 analysisFeatureMask,
2677 CAM_STREAM_TYPE_ANALYSIS,
2678 &analysisDim,
2679 (analysisInfo.analysis_format
2680 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2681 : CAM_FORMAT_YUV_420_NV21),
2682 analysisInfo.hw_analysis_supported,
2683 gCamCapability[mCameraId]->color_arrangement,
2684 this,
2685 0); // force buffer count to 0
2686 } else {
2687 LOGW("getAnalysisInfo failed, ret = %d", ret);
2688 }
2689 if (!mAnalysisChannel) {
2690 LOGW("Analysis channel cannot be created");
2691 }
2692 }
2693
Thierry Strudel3d639192016-09-09 11:52:26 -07002694 //RAW DUMP channel
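// The RAW dump channel is an internal stream (typically enabled via a debug
// property); it is created only when raw dumping is on and the framework did
// not already request a RAW stream of its own.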
2695 if (mEnableRawDump && isRawStreamRequested == false){
2696 cam_dimension_t rawDumpSize;
2697 rawDumpSize = getMaxRawSize(mCameraId);
2698 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2699 setPAAFSupport(rawDumpFeatureMask,
2700 CAM_STREAM_TYPE_RAW,
2701 gCamCapability[mCameraId]->color_arrangement);
2702 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops,
2705 rawDumpSize,
2706 &padding_info,
2707 this, rawDumpFeatureMask);
2708 if (!mRawDumpChannel) {
2709 LOGE("Raw Dump channel cannot be created");
2710 pthread_mutex_unlock(&mMutex);
2711 return -ENOMEM;
2712 }
2713 }
2714
Thierry Strudel3d639192016-09-09 11:52:26 -07002715 if (mAnalysisChannel) {
2716 cam_analysis_info_t analysisInfo;
2717 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2718 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2719 CAM_STREAM_TYPE_ANALYSIS;
2720 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2721 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002722 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002723 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2724 &analysisInfo);
2725 if (rc != NO_ERROR) {
2726 LOGE("getAnalysisInfo failed, ret = %d", rc);
2727 pthread_mutex_unlock(&mMutex);
2728 return rc;
2729 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002730 cam_color_filter_arrangement_t analysis_color_arrangement =
2731 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2732 CAM_FILTER_ARRANGEMENT_Y :
2733 gCamCapability[mCameraId]->color_arrangement);
2734 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2735 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2736 analysis_color_arrangement);
2737
Thierry Strudel3d639192016-09-09 11:52:26 -07002738 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002739 mCommon.getMatchingDimension(previewSize,
2740 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002741 mStreamConfigInfo.num_streams++;
2742 }
2743
Thierry Strudel2896d122017-02-23 19:18:03 -08002744 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002745 cam_analysis_info_t supportInfo;
2746 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2747 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2748 setPAAFSupport(callbackFeatureMask,
2749 CAM_STREAM_TYPE_CALLBACK,
2750 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002751 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002752 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002753 if (ret != NO_ERROR) {
2754 /* Ignore the error for Mono camera
2755 * because the PAAF bit mask is only set
2756 * for CAM_STREAM_TYPE_ANALYSIS stream type
2757 */
2758 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2759 LOGW("getAnalysisInfo failed, ret = %d", ret);
2760 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002761 }
2762 mSupportChannel = new QCamera3SupportChannel(
2763 mCameraHandle->camera_handle,
2764 mChannelHandle,
2765 mCameraHandle->ops,
2766 &gCamCapability[mCameraId]->padding_info,
2767 callbackFeatureMask,
2768 CAM_STREAM_TYPE_CALLBACK,
2769 &QCamera3SupportChannel::kDim,
2770 CAM_FORMAT_YUV_420_NV21,
2771 supportInfo.hw_analysis_supported,
2772 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002773 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002774 if (!mSupportChannel) {
2775 LOGE("dummy channel cannot be created");
2776 pthread_mutex_unlock(&mMutex);
2777 return -ENOMEM;
2778 }
2779 }
2780
2781 if (mSupportChannel) {
2782 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2783 QCamera3SupportChannel::kDim;
2784 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2785 CAM_STREAM_TYPE_CALLBACK;
2786 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2787 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2788 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2789 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2790 gCamCapability[mCameraId]->color_arrangement);
2791 mStreamConfigInfo.num_streams++;
2792 }
2793
2794 if (mRawDumpChannel) {
2795 cam_dimension_t rawSize;
2796 rawSize = getMaxRawSize(mCameraId);
2797 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2798 rawSize;
2799 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2800 CAM_STREAM_TYPE_RAW;
2801 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2802 CAM_QCOM_FEATURE_NONE;
2803 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2804 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2805 gCamCapability[mCameraId]->color_arrangement);
2806 mStreamConfigInfo.num_streams++;
2807 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002808
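// When an HDR+ RAW source channel exists, add a max-size RAW entry to the
// stream config so the backend also produces the full-resolution RAW frames
// that feed HDR+ processing.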
2809 if (mHdrPlusRawSrcChannel) {
2810 cam_dimension_t rawSize;
2811 rawSize = getMaxRawSize(mCameraId);
2812 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2813 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2814 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2815 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2816 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2817 gCamCapability[mCameraId]->color_arrangement);
2818 mStreamConfigInfo.num_streams++;
2819 }
2820
Thierry Strudel3d639192016-09-09 11:52:26 -07002821 /* In HFR mode, if no video stream is added, create a dummy channel so that
2822 * the ISP can run in batch mode even for the preview-only case. This channel is
2823 * never 'start'ed (no stream-on), it is only 'initialized' */
2824 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2825 !m_bIsVideo) {
2826 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2827 setPAAFSupport(dummyFeatureMask,
2828 CAM_STREAM_TYPE_VIDEO,
2829 gCamCapability[mCameraId]->color_arrangement);
2830 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2831 mChannelHandle,
2832 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002833 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002834 this,
2835 &mDummyBatchStream,
2836 CAM_STREAM_TYPE_VIDEO,
2837 dummyFeatureMask,
2838 mMetadataChannel);
2839 if (NULL == mDummyBatchChannel) {
2840 LOGE("creation of mDummyBatchChannel failed."
2841 "Preview will use non-hfr sensor mode ");
2842 }
2843 }
2844 if (mDummyBatchChannel) {
2845 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2846 mDummyBatchStream.width;
2847 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2848 mDummyBatchStream.height;
2849 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2850 CAM_STREAM_TYPE_VIDEO;
2851 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2852 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2853 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2854 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2855 gCamCapability[mCameraId]->color_arrangement);
2856 mStreamConfigInfo.num_streams++;
2857 }
2858
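// Publish the HAL's buffer requirements to the backend: EIS 3.0 uses the
// larger video buffer pool, 4K video leaves max_buffers at 0 (presumably
// deferring to the backend), and everything else uses the normal in-flight
// request depth.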
2859 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2860 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08002861 m_bIs4KVideo ? 0 :
2862 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07002863
2864 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2865 for (pendingRequestIterator i = mPendingRequestsList.begin();
2866 i != mPendingRequestsList.end();) {
2867 i = erasePendingRequest(i);
2868 }
2869 mPendingFrameDropList.clear();
2870 // Initialize/Reset the pending buffers list
2871 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2872 req.mPendingBufferList.clear();
2873 }
2874 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2875
Thierry Strudel3d639192016-09-09 11:52:26 -07002876 mCurJpegMeta.clear();
2877 //Get min frame duration for this streams configuration
2878 deriveMinFrameDuration();
2879
Chien-Yu Chenee335912017-02-09 17:53:20 -08002880 mFirstPreviewIntentSeen = false;
2881
2882 // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07002883 {
2884 Mutex::Autolock l(gHdrPlusClientLock);
2885 disableHdrPlusModeLocked();
2886 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08002887
Thierry Strudel3d639192016-09-09 11:52:26 -07002888 // Update state
2889 mState = CONFIGURED;
2890
2891 pthread_mutex_unlock(&mMutex);
2892
2893 return rc;
2894}
2895
2896/*===========================================================================
2897 * FUNCTION : validateCaptureRequest
2898 *
2899 * DESCRIPTION: validate a capture request from camera service
2900 *
2901 * PARAMETERS :
2902 * @request : request from framework to process
2903 *
2904 * RETURN :
2905 *
2906 *==========================================================================*/
2907int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002908 camera3_capture_request_t *request,
2909 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002910{
2911 ssize_t idx = 0;
2912 const camera3_stream_buffer_t *b;
2913 CameraMetadata meta;
2914
2915 /* Sanity check the request */
2916 if (request == NULL) {
2917 LOGE("NULL capture request");
2918 return BAD_VALUE;
2919 }
2920
2921 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2922 /* Settings cannot be NULL for the first request */
2923 return BAD_VALUE;
2924 }
2925
2926 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002927 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2928 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 LOGE("Request %d: No output buffers provided!",
2930 frameNumber);
2931 return BAD_VALUE;
2932 }
2933 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2934 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2935 request->num_output_buffers, MAX_NUM_STREAMS);
2936 return BAD_VALUE;
2937 }
2938 if (request->input_buffer != NULL) {
2939 b = request->input_buffer;
2940 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2941 LOGE("Request %d: Buffer %ld: Status not OK!",
2942 frameNumber, (long)idx);
2943 return BAD_VALUE;
2944 }
2945 if (b->release_fence != -1) {
2946 LOGE("Request %d: Buffer %ld: Has a release fence!",
2947 frameNumber, (long)idx);
2948 return BAD_VALUE;
2949 }
2950 if (b->buffer == NULL) {
2951 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2952 frameNumber, (long)idx);
2953 return BAD_VALUE;
2954 }
2955 }
2956
2957 // Validate all buffers
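// Every output buffer must belong to a configured stream, carry an OK status,
// have no release fence, and provide non-NULL buffer and private handles.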
2958 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002959 if (b == NULL) {
2960 return BAD_VALUE;
2961 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002962 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002963 QCamera3ProcessingChannel *channel =
2964 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2965 if (channel == NULL) {
2966 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2967 frameNumber, (long)idx);
2968 return BAD_VALUE;
2969 }
2970 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2971 LOGE("Request %d: Buffer %ld: Status not OK!",
2972 frameNumber, (long)idx);
2973 return BAD_VALUE;
2974 }
2975 if (b->release_fence != -1) {
2976 LOGE("Request %d: Buffer %ld: Has a release fence!",
2977 frameNumber, (long)idx);
2978 return BAD_VALUE;
2979 }
2980 if (b->buffer == NULL) {
2981 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2982 frameNumber, (long)idx);
2983 return BAD_VALUE;
2984 }
2985 if (*(b->buffer) == NULL) {
2986 LOGE("Request %d: Buffer %ld: NULL private handle!",
2987 frameNumber, (long)idx);
2988 return BAD_VALUE;
2989 }
2990 idx++;
2991 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002992 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002993 return NO_ERROR;
2994}
2995
2996/*===========================================================================
2997 * FUNCTION : deriveMinFrameDuration
2998 *
2999 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3000 * on currently configured streams.
3001 *
3002 * PARAMETERS : NONE
3003 *
3004 * RETURN : NONE
3005 *
3006 *==========================================================================*/
3007void QCamera3HardwareInterface::deriveMinFrameDuration()
3008{
3009 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
3010
3011 maxJpegDim = 0;
3012 maxProcessedDim = 0;
3013 maxRawDim = 0;
3014
3015 // Figure out maximum jpeg, processed, and raw dimensions
3016 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3017 it != mStreamInfo.end(); it++) {
3018
3019 // Input stream doesn't have valid stream_type
3020 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3021 continue;
3022
3023 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3024 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3025 if (dimension > maxJpegDim)
3026 maxJpegDim = dimension;
3027 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3028 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3029 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
3030 if (dimension > maxRawDim)
3031 maxRawDim = dimension;
3032 } else {
3033 if (dimension > maxProcessedDim)
3034 maxProcessedDim = dimension;
3035 }
3036 }
3037
3038 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3039 MAX_SIZES_CNT);
3040
3041 //Assume all jpeg dimensions are in processed dimensions.
3042 if (maxJpegDim > maxProcessedDim)
3043 maxProcessedDim = maxJpegDim;
3044 //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
3045 if (maxProcessedDim > maxRawDim) {
3046 maxRawDim = INT32_MAX;
3047
3048 for (size_t i = 0; i < count; i++) {
3049 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3050 gCamCapability[mCameraId]->raw_dim[i].height;
3051 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3052 maxRawDim = dimension;
3053 }
3054 }
3055
3056 //Find minimum durations for processed, jpeg, and raw
3057 for (size_t i = 0; i < count; i++) {
3058 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3059 gCamCapability[mCameraId]->raw_dim[i].height) {
3060 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3061 break;
3062 }
3063 }
3064 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3065 for (size_t i = 0; i < count; i++) {
3066 if (maxProcessedDim ==
3067 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3068 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3069 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3070 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3071 break;
3072 }
3073 }
3074}
3075
3076/*===========================================================================
3077 * FUNCTION : getMinFrameDuration
3078 *
3079 * DESCRIPTION: get minimum frame duration based on the per-stream minimum frame
3080 * durations and the current request configuration.
3081 *
3082 * PARAMETERS : @request: request sent by the framework
3083 *
3084 * RETURN : min frame duration for a particular request
3085 *
3086 *==========================================================================*/
3087int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3088{
3089 bool hasJpegStream = false;
3090 bool hasRawStream = false;
3091 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3092 const camera3_stream_t *stream = request->output_buffers[i].stream;
3093 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3094 hasJpegStream = true;
3095 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3096 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3097 stream->format == HAL_PIXEL_FORMAT_RAW16)
3098 hasRawStream = true;
3099 }
3100
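// Fold in the JPEG minimum duration only when the request actually contains a
// BLOB buffer; the RAW and processed minimums always apply.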
3101 if (!hasJpegStream)
3102 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3103 else
3104 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3105}
3106
3107/*===========================================================================
3108 * FUNCTION : handleBuffersDuringFlushLock
3109 *
3110 * DESCRIPTION: Account for buffers returned from back-end during flush
3111 * This function is executed while mMutex is held by the caller.
3112 *
3113 * PARAMETERS :
3114 * @buffer: image buffer for the callback
3115 *
3116 * RETURN :
3117 *==========================================================================*/
3118void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3119{
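// Every buffer the back-end hands back during flush decrements
// numPendingBufsAtFlush; once the count reaches zero, flush() (waiting on
// mBuffersCond) is signalled to continue.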
3120 bool buffer_found = false;
3121 for (List<PendingBuffersInRequest>::iterator req =
3122 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3123 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3124 for (List<PendingBufferInfo>::iterator i =
3125 req->mPendingBufferList.begin();
3126 i != req->mPendingBufferList.end(); i++) {
3127 if (i->buffer == buffer->buffer) {
3128 mPendingBuffersMap.numPendingBufsAtFlush--;
3129 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3130 buffer->buffer, req->frame_number,
3131 mPendingBuffersMap.numPendingBufsAtFlush);
3132 buffer_found = true;
3133 break;
3134 }
3135 }
3136 if (buffer_found) {
3137 break;
3138 }
3139 }
3140 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3141 //signal the flush()
3142 LOGD("All buffers returned to HAL. Continue flush");
3143 pthread_cond_signal(&mBuffersCond);
3144 }
3145}
3146
Thierry Strudel3d639192016-09-09 11:52:26 -07003147/*===========================================================================
3148 * FUNCTION : handleBatchMetadata
3149 *
3150 * DESCRIPTION: Handles metadata buffer callback in batch mode
3151 *
3152 * PARAMETERS : @metadata_buf: metadata buffer
3153 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3154 * the meta buf in this method
3155 *
3156 * RETURN :
3157 *
3158 *==========================================================================*/
3159void QCamera3HardwareInterface::handleBatchMetadata(
3160 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3161{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003162 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003163
3164 if (NULL == metadata_buf) {
3165 LOGE("metadata_buf is NULL");
3166 return;
3167 }
3168 /* In batch mode, the metadata will contain the frame number and timestamp of
3169 * the last frame in the batch. Eg: a batch containing buffers from request
3170 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3171 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3172 * multiple process_capture_results */
3173 metadata_buffer_t *metadata =
3174 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3175 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3176 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3177 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3178 uint32_t frame_number = 0, urgent_frame_number = 0;
3179 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3180 bool invalid_metadata = false;
3181 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3182 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003183 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003184
3185 int32_t *p_frame_number_valid =
3186 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3187 uint32_t *p_frame_number =
3188 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3189 int64_t *p_capture_time =
3190 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3191 int32_t *p_urgent_frame_number_valid =
3192 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3193 uint32_t *p_urgent_frame_number =
3194 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3195
3196 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3197 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3198 (NULL == p_urgent_frame_number)) {
3199 LOGE("Invalid metadata");
3200 invalid_metadata = true;
3201 } else {
3202 frame_number_valid = *p_frame_number_valid;
3203 last_frame_number = *p_frame_number;
3204 last_frame_capture_time = *p_capture_time;
3205 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3206 last_urgent_frame_number = *p_urgent_frame_number;
3207 }
3208
3209 /* In batch mode, when no video buffers are requested, set_parms are sent
3210 * for every capture_request. The difference between consecutive urgent
3211 * frame numbers and frame numbers should be used to interpolate the
3212 * corresponding frame numbers and time stamps */
3213 pthread_mutex_lock(&mMutex);
3214 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003215 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3216 if(idx < 0) {
3217 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3218 last_urgent_frame_number);
3219 mState = ERROR;
3220 pthread_mutex_unlock(&mMutex);
3221 return;
3222 }
3223 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003224 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3225 first_urgent_frame_number;
3226
3227 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3228 urgent_frame_number_valid,
3229 first_urgent_frame_number, last_urgent_frame_number);
3230 }
3231
3232 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003233 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3234 if(idx < 0) {
3235 LOGE("Invalid frame number received: %d. Irrecoverable error",
3236 last_frame_number);
3237 mState = ERROR;
3238 pthread_mutex_unlock(&mMutex);
3239 return;
3240 }
3241 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003242 frameNumDiff = last_frame_number + 1 -
3243 first_frame_number;
3244 mPendingBatchMap.removeItem(last_frame_number);
3245
3246 LOGD("frm: valid: %d frm_num: %d - %d",
3247 frame_number_valid,
3248 first_frame_number, last_frame_number);
3249
3250 }
3251 pthread_mutex_unlock(&mMutex);
3252
3253 if (urgent_frame_number_valid || frame_number_valid) {
3254 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3255 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3256 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3257 urgentFrameNumDiff, last_urgent_frame_number);
3258 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3259 LOGE("frameNumDiff: %d frameNum: %d",
3260 frameNumDiff, last_frame_number);
3261 }
3262
3263 for (size_t i = 0; i < loopCount; i++) {
3264 /* handleMetadataWithLock is called even for invalid_metadata for
3265 * pipeline depth calculation */
3266 if (!invalid_metadata) {
3267 /* Infer frame number. Batch metadata contains frame number of the
3268 * last frame */
3269 if (urgent_frame_number_valid) {
3270 if (i < urgentFrameNumDiff) {
3271 urgent_frame_number =
3272 first_urgent_frame_number + i;
3273 LOGD("inferred urgent frame_number: %d",
3274 urgent_frame_number);
3275 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3276 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3277 } else {
3278 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3279 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3280 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3281 }
3282 }
3283
3284 /* Infer frame number. Batch metadata contains frame number of the
3285 * last frame */
3286 if (frame_number_valid) {
3287 if (i < frameNumDiff) {
3288 frame_number = first_frame_number + i;
3289 LOGD("inferred frame_number: %d", frame_number);
3290 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3291 CAM_INTF_META_FRAME_NUMBER, frame_number);
3292 } else {
3293 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3294 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3295 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3296 }
3297 }
3298
3299 if (last_frame_capture_time) {
3300 //Infer timestamp
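// Timestamps are back-computed from the last frame in the batch at a spacing
// of 1/mHFRVideoFps. For example (assumed values), a batch of 4 at 120 fps
// with the last capture at T lands the frames at roughly T-25ms, T-16.7ms,
// T-8.3ms and T.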
3301 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003302 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003303 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003304 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003305 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3306 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3307 LOGD("batch capture_time: %lld, capture_time: %lld",
3308 last_frame_capture_time, capture_time);
3309 }
3310 }
3311 pthread_mutex_lock(&mMutex);
3312 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003313 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003314 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3315 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003316 &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003317 pthread_mutex_unlock(&mMutex);
3318 }
3319
3320 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003321 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003322 mMetadataChannel->bufDone(metadata_buf);
3323 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003324 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003325 }
3326}
3327
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003328void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3329 camera3_error_msg_code_t errorCode)
3330{
3331 camera3_notify_msg_t notify_msg;
3332 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3333 notify_msg.type = CAMERA3_MSG_ERROR;
3334 notify_msg.message.error.error_code = errorCode;
3335 notify_msg.message.error.error_stream = NULL;
3336 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003337 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003338
3339 return;
3340}
Thierry Strudel3d639192016-09-09 11:52:26 -07003341/*===========================================================================
3342 * FUNCTION : handleMetadataWithLock
3343 *
3344 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3345 *
3346 * PARAMETERS : @metadata_buf: metadata buffer
3347 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3348 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003349 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3350 * last urgent metadata in a batch. Always true for non-batch mode
3351 * @lastMetadataInBatch: Boolean to indicate whether this is the
3352 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003353 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3354 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003355 *
3356 * RETURN :
3357 *
3358 *==========================================================================*/
3359void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003360 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003361 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3362 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003363{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003364 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003365 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3366 //during flush do not send metadata from this thread
3367 LOGD("not sending metadata during flush or when mState is error");
3368 if (free_and_bufdone_meta_buf) {
3369 mMetadataChannel->bufDone(metadata_buf);
3370 free(metadata_buf);
3371 }
3372 return;
3373 }
3374
3375 //not in flush
3376 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3377 int32_t frame_number_valid, urgent_frame_number_valid;
3378 uint32_t frame_number, urgent_frame_number;
3379 int64_t capture_time;
3380 nsecs_t currentSysTime;
3381
3382 int32_t *p_frame_number_valid =
3383 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3384 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3385 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3386 int32_t *p_urgent_frame_number_valid =
3387 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3388 uint32_t *p_urgent_frame_number =
3389 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3390 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3391 metadata) {
3392 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3393 *p_frame_number_valid, *p_frame_number);
3394 }
3395
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003396 camera_metadata_t *resultMetadata = nullptr;
3397
Thierry Strudel3d639192016-09-09 11:52:26 -07003398 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3399 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3400 LOGE("Invalid metadata");
3401 if (free_and_bufdone_meta_buf) {
3402 mMetadataChannel->bufDone(metadata_buf);
3403 free(metadata_buf);
3404 }
3405 goto done_metadata;
3406 }
3407 frame_number_valid = *p_frame_number_valid;
3408 frame_number = *p_frame_number;
3409 capture_time = *p_capture_time;
3410 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3411 urgent_frame_number = *p_urgent_frame_number;
3412 currentSysTime = systemTime(CLOCK_MONOTONIC);
3413
3414 // Detect if buffers from any requests are overdue
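// Requests whose buffers have been outstanding longer than the timeout are
// cancelled via timeoutFrame() on the owning channel so the pipeline does not
// stall indefinitely on a missing buffer.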
3415 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003416 int64_t timeout;
3417 {
3418 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3419 // If there is a pending HDR+ request, the following requests may be blocked until the
3420 // HDR+ request is done. So allow a longer timeout.
3421 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3422 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3423 }
3424
3425 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003426 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003427 assert(missed.stream->priv);
3428 if (missed.stream->priv) {
3429 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3430 assert(ch->mStreams[0]);
3431 if (ch->mStreams[0]) {
3432 LOGE("Cancel missing frame = %d, buffer = %p,"
3433 "stream type = %d, stream format = %d",
3434 req.frame_number, missed.buffer,
3435 ch->mStreams[0]->getMyType(), missed.stream->format);
3436 ch->timeoutFrame(req.frame_number);
3437 }
3438 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003439 }
3440 }
3441 }
3442 //Partial result on process_capture_result for timestamp
3443 if (urgent_frame_number_valid) {
3444 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3445 urgent_frame_number, capture_time);
3446
3447 //Received an urgent frame number, handle it
3448 //using partial results
3449 for (pendingRequestIterator i =
3450 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3451 LOGD("Iterator Frame = %d urgent frame = %d",
3452 i->frame_number, urgent_frame_number);
3453
3454 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3455 (i->partial_result_cnt == 0)) {
3456 LOGE("Error: HAL missed urgent metadata for frame number %d",
3457 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003458 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003459 }
3460
3461 if (i->frame_number == urgent_frame_number &&
3462 i->bUrgentReceived == 0) {
3463
3464 camera3_capture_result_t result;
3465 memset(&result, 0, sizeof(camera3_capture_result_t));
3466
3467 i->partial_result_cnt++;
3468 i->bUrgentReceived = 1;
3469 // Extract 3A metadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003470 result.result = translateCbUrgentMetadataToResultMetadata(
3471 metadata, lastUrgentMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003472 // Populate metadata result
3473 result.frame_number = urgent_frame_number;
3474 result.num_output_buffers = 0;
3475 result.output_buffers = NULL;
3476 result.partial_result = i->partial_result_cnt;
3477
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003478 {
3479 Mutex::Autolock l(gHdrPlusClientLock);
3480 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3481 // Notify HDR+ client about the partial metadata.
3482 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3483 result.partial_result == PARTIAL_RESULT_COUNT);
3484 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003485 }
3486
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003487 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003488 LOGD("urgent frame_number = %u, capture_time = %lld",
3489 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003490 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3491 // Instant AEC settled for this frame.
3492 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3493 mInstantAECSettledFrameNumber = urgent_frame_number;
3494 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 free_camera_metadata((camera_metadata_t *)result.result);
3496 break;
3497 }
3498 }
3499 }
3500
3501 if (!frame_number_valid) {
3502 LOGD("Not a valid normal frame number, used as SOF only");
3503 if (free_and_bufdone_meta_buf) {
3504 mMetadataChannel->bufDone(metadata_buf);
3505 free(metadata_buf);
3506 }
3507 goto done_metadata;
3508 }
3509 LOGH("valid frame_number = %u, capture_time = %lld",
3510 frame_number, capture_time);
3511
Emilian Peev7650c122017-01-19 08:24:33 -08003512 if (metadata->is_depth_data_valid) {
3513 handleDepthDataLocked(metadata->depth_data, frame_number);
3514 }
3515
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003516 // Check whether any stream buffer corresponding to this is dropped or not
3517 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3518 // OR, if instant AEC is enabled, drop frames until AEC is settled.
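// Frames numbered below mInstantAECSettledFrameNumber are reported back as
// buffer errors so the client never receives frames captured before the
// exposure converged.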
3519 for (auto & pendingRequest : mPendingRequestsList) {
3520 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3521 mInstantAECSettledFrameNumber)) {
3522 camera3_notify_msg_t notify_msg = {};
3523 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003524 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003525 QCamera3ProcessingChannel *channel =
3526 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003527 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003528 if (p_cam_frame_drop) {
3529 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003530 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003531 // Got the stream ID for drop frame.
3532 dropFrame = true;
3533 break;
3534 }
3535 }
3536 } else {
3537 // This is instant AEC case.
3538 // For instant AEC, drop the stream until AEC is settled.
3539 dropFrame = true;
3540 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003541
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003542 if (dropFrame) {
3543 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3544 if (p_cam_frame_drop) {
3545 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003546 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003547 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003548 } else {
3549 // For instant AEC, inform frame drop and frame number
3550 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3551 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003552 pendingRequest.frame_number, streamID,
3553 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003554 }
3555 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003556 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003557 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003558 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003559 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003560 if (p_cam_frame_drop) {
3561 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003562 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003563 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003564 } else {
3565 // For instant AEC, inform frame drop and frame number
3566 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3567 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003568 pendingRequest.frame_number, streamID,
3569 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003570 }
3571 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003572 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003573 PendingFrameDrop.stream_ID = streamID;
3574 // Add the Frame drop info to mPendingFrameDropList
3575 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003576 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003577 }
3578 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003579 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003580
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003581 for (auto & pendingRequest : mPendingRequestsList) {
3582 // Find the pending request with the frame number.
3583 if (pendingRequest.frame_number == frame_number) {
3584 // Update the sensor timestamp.
3585 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003588 /* Set the timestamp in display metadata so that clients aware of
3589 private_handle such as VT can use this un-modified timestamps.
3590 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003591 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003592
Thierry Strudel3d639192016-09-09 11:52:26 -07003593 // Find channel requiring metadata, meaning internal offline postprocess
3594 // is needed.
3595 //TODO: for now, we don't support two streams requiring metadata at the same time.
3596 // (because we are not making copies, and the metadata buffer is not reference counted.)
3597 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003598 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3599 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 if (iter->need_metadata) {
3601 internalPproc = true;
3602 QCamera3ProcessingChannel *channel =
3603 (QCamera3ProcessingChannel *)iter->stream->priv;
3604 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003605 if(p_is_metabuf_queued != NULL) {
3606 *p_is_metabuf_queued = true;
3607 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003608 break;
3609 }
3610 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003611 for (auto itr = pendingRequest.internalRequestList.begin();
3612 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003613 if (itr->need_metadata) {
3614 internalPproc = true;
3615 QCamera3ProcessingChannel *channel =
3616 (QCamera3ProcessingChannel *)itr->stream->priv;
3617 channel->queueReprocMetadata(metadata_buf);
3618 break;
3619 }
3620 }
3621
Thierry Strudel54dc9782017-02-15 12:12:10 -08003622 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003623
3624 bool *enableZsl = nullptr;
3625 if (gExposeEnableZslKey) {
3626 enableZsl = &pendingRequest.enableZsl;
3627 }
3628
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003629 resultMetadata = translateFromHalMetadata(metadata,
3630 pendingRequest.timestamp, pendingRequest.request_id,
3631 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3632 pendingRequest.capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07003633 pendingRequest.hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003634 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003635 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003636 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003637 internalPproc, pendingRequest.fwkCacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003638 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003639
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003640 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003641
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 if (pendingRequest.blob_request) {
3643 //Dump tuning metadata if enabled and available
3644 char prop[PROPERTY_VALUE_MAX];
3645 memset(prop, 0, sizeof(prop));
3646 property_get("persist.camera.dumpmetadata", prop, "0");
3647 int32_t enabled = atoi(prop);
3648 if (enabled && metadata->is_tuning_params_valid) {
3649 dumpMetadataToFile(metadata->tuning_params,
3650 mMetaFrameCount,
3651 enabled,
3652 "Snapshot",
3653 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003654 }
3655 }
3656
3657 if (!internalPproc) {
3658 LOGD("couldn't find need_metadata for this metadata");
3659 // Return metadata buffer
3660 if (free_and_bufdone_meta_buf) {
3661 mMetadataChannel->bufDone(metadata_buf);
3662 free(metadata_buf);
3663 }
3664 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003665
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003666 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003667 }
3668 }
3669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003670 // Try to send out shutter callbacks and capture results.
3671 handlePendingResultsWithLock(frame_number, resultMetadata);
3672 return;
3673
Thierry Strudel3d639192016-09-09 11:52:26 -07003674done_metadata:
3675 for (pendingRequestIterator i = mPendingRequestsList.begin();
3676 i != mPendingRequestsList.end() ;i++) {
3677 i->pipeline_depth++;
3678 }
3679 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3680 unblockRequestIfNecessary();
3681}
3682
3683/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003684 * FUNCTION : handleDepthDataLocked
3685 *
3686 * DESCRIPTION: Handles incoming depth data
3687 *
3688 * PARAMETERS : @depthData : Depth data
3689 * @frameNumber: Frame number of the incoming depth data
3690 *
3691 * RETURN :
3692 *
3693 *==========================================================================*/
3694void QCamera3HardwareInterface::handleDepthDataLocked(
3695 const cam_depth_data_t &depthData, uint32_t frameNumber) {
3696 uint32_t currentFrameNumber;
3697 buffer_handle_t *depthBuffer;
3698
3699 if (nullptr == mDepthChannel) {
3700 LOGE("Depth channel not present!");
3701 return;
3702 }
3703
3704 camera3_stream_buffer_t resultBuffer =
3705 {.acquire_fence = -1,
3706 .release_fence = -1,
3707 .status = CAMERA3_BUFFER_STATUS_OK,
3708 .buffer = nullptr,
3709 .stream = mDepthChannel->getStream()};
3710 camera3_capture_result_t result =
3711 {.result = nullptr,
3712 .num_output_buffers = 1,
3713 .output_buffers = &resultBuffer,
3714 .partial_result = 0,
3715 .frame_number = 0};
3716
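// Walk the depth channel's buffers in frame-number order: buffers older than
// the incoming frame never received depth data and are returned as buffer
// errors, the matching frame gets the depth payload, and newer frames stay
// queued.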
3717 do {
3718 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3719 if (nullptr == depthBuffer) {
3720 break;
3721 }
3722
3723 result.frame_number = currentFrameNumber;
3724 resultBuffer.buffer = depthBuffer;
3725 if (currentFrameNumber == frameNumber) {
3726 int32_t rc = mDepthChannel->populateDepthData(depthData,
3727 frameNumber);
3728 if (NO_ERROR != rc) {
3729 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3730 } else {
3731 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3732 }
3733 } else if (currentFrameNumber > frameNumber) {
3734 break;
3735 } else {
3736 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3737 {{currentFrameNumber, mDepthChannel->getStream(),
3738 CAMERA3_MSG_ERROR_BUFFER}}};
3739 orchestrateNotify(&notify_msg);
3740
3741 LOGE("Depth buffer for frame number: %d is missing "
3742 "returning back!", currentFrameNumber);
3743 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3744 }
3745 mDepthChannel->unmapBuffer(currentFrameNumber);
3746
3747 orchestrateResult(&result);
3748 } while (currentFrameNumber < frameNumber);
3749}
3750
3751/*===========================================================================
3752 * FUNCTION : notifyErrorFoPendingDepthData
3753 *
3754 * DESCRIPTION: Returns error for any pending depth buffers
3755 *
3756 * PARAMETERS : depthCh - depth channel that needs to get flushed
3757 *
3758 * RETURN :
3759 *
3760 *==========================================================================*/
3761void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
3762 QCamera3DepthChannel *depthCh) {
3763 uint32_t currentFrameNumber;
3764 buffer_handle_t *depthBuffer;
3765
3766 if (nullptr == depthCh) {
3767 return;
3768 }
3769
3770 camera3_notify_msg_t notify_msg =
3771 {.type = CAMERA3_MSG_ERROR,
3772 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
3773 camera3_stream_buffer_t resultBuffer =
3774 {.acquire_fence = -1,
3775 .release_fence = -1,
3776 .buffer = nullptr,
3777 .stream = depthCh->getStream(),
3778 .status = CAMERA3_BUFFER_STATUS_ERROR};
3779 camera3_capture_result_t result =
3780 {.result = nullptr,
3781 .frame_number = 0,
3782 .num_output_buffers = 1,
3783 .partial_result = 0,
3784 .output_buffers = &resultBuffer};
3785
3786 while (nullptr !=
3787 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
3788 depthCh->unmapBuffer(currentFrameNumber);
3789
3790 notify_msg.message.error.frame_number = currentFrameNumber;
3791 orchestrateNotify(&notify_msg);
3792
3793 resultBuffer.buffer = depthBuffer;
3794 result.frame_number = currentFrameNumber;
3795 orchestrateResult(&result);
3796 }
3797}
3798
3799/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07003800 * FUNCTION : hdrPlusPerfLock
3801 *
3802 * DESCRIPTION: perf lock for HDR+ using custom intent
3803 *
3804 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3805 *
3806 * RETURN : None
3807 *
3808 *==========================================================================*/
3809void QCamera3HardwareInterface::hdrPlusPerfLock(
3810 mm_camera_super_buf_t *metadata_buf)
3811{
3812 if (NULL == metadata_buf) {
3813 LOGE("metadata_buf is NULL");
3814 return;
3815 }
3816 metadata_buffer_t *metadata =
3817 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3818 int32_t *p_frame_number_valid =
3819 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3820 uint32_t *p_frame_number =
3821 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3822
3823 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3824 LOGE("%s: Invalid metadata", __func__);
3825 return;
3826 }
3827
3828 //acquire perf lock for 5 sec after the last HDR frame is captured
3829 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3830 if ((p_frame_number != NULL) &&
3831 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003832 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835}
3836
3837/*===========================================================================
3838 * FUNCTION : handleInputBufferWithLock
3839 *
3840 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
3841 *
3842 * PARAMETERS : @frame_number: frame number of the input buffer
3843 *
3844 * RETURN :
3845 *
3846 *==========================================================================*/
3847void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
3848{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003849 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003850 pendingRequestIterator i = mPendingRequestsList.begin();
3851 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3852 i++;
3853 }
3854 if (i != mPendingRequestsList.end() && i->input_buffer) {
3855 //found the right request
3856 if (!i->shutter_notified) {
3857 CameraMetadata settings;
3858 camera3_notify_msg_t notify_msg;
3859 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3860 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
3861 if(i->settings) {
3862 settings = i->settings;
3863 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
3864 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
3865 } else {
3866 LOGE("No timestamp in input settings! Using current one.");
3867 }
3868 } else {
3869 LOGE("Input settings missing!");
3870 }
3871
3872 notify_msg.type = CAMERA3_MSG_SHUTTER;
3873 notify_msg.message.shutter.frame_number = frame_number;
3874 notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07003876 i->shutter_notified = true;
3877 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
3878 i->frame_number, notify_msg.message.shutter.timestamp);
3879 }
3880
3881 if (i->input_buffer->release_fence != -1) {
3882 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3883 close(i->input_buffer->release_fence);
3884 if (rc != OK) {
3885 LOGE("input buffer sync wait failed %d", rc);
3886 }
3887 }
3888
3889 camera3_capture_result result;
3890 memset(&result, 0, sizeof(camera3_capture_result));
3891 result.frame_number = frame_number;
3892 result.result = i->settings;
3893 result.input_buffer = i->input_buffer;
3894 result.partial_result = PARTIAL_RESULT_COUNT;
3895
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003896 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003897 LOGD("Input request metadata and input buffer frame_number = %u",
3898 i->frame_number);
3899 i = erasePendingRequest(i);
3900 } else {
3901 LOGE("Could not find input request for frame number %d", frame_number);
3902 }
3903}
3904
3905/*===========================================================================
3906 * FUNCTION : handleBufferWithLock
3907 *
3908 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
3909 *
3910 * PARAMETERS : @buffer: image buffer for the callback
3911 * @frame_number: frame number of the image buffer
3912 *
3913 * RETURN :
3914 *
3915 *==========================================================================*/
3916void QCamera3HardwareInterface::handleBufferWithLock(
3917 camera3_stream_buffer_t *buffer, uint32_t frame_number)
3918{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003919 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003920
3921 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3922 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
3923 }
3924
Thierry Strudel3d639192016-09-09 11:52:26 -07003925 /* Nothing to be done during error state */
3926 if ((ERROR == mState) || (DEINIT == mState)) {
3927 return;
3928 }
3929 if (mFlushPerf) {
3930 handleBuffersDuringFlushLock(buffer);
3931 return;
3932 }
3933 //not in flush
3934 // If the frame number doesn't exist in the pending request list,
3935 // directly send the buffer to the frameworks, and update pending buffers map
3936 // Otherwise, book-keep the buffer.
3937 pendingRequestIterator i = mPendingRequestsList.begin();
3938 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
3939 i++;
3940 }
3941 if (i == mPendingRequestsList.end()) {
3942 // Verify all pending requests frame_numbers are greater
3943 for (pendingRequestIterator j = mPendingRequestsList.begin();
3944 j != mPendingRequestsList.end(); j++) {
3945 if ((j->frame_number < frame_number) && !(j->input_buffer)) {
3946 LOGW("Error: pending live frame number %d is smaller than %d",
3947 j->frame_number, frame_number);
3948 }
3949 }
3950 camera3_capture_result_t result;
3951 memset(&result, 0, sizeof(camera3_capture_result_t));
3952 result.result = NULL;
3953 result.frame_number = frame_number;
3954 result.num_output_buffers = 1;
3955 result.partial_result = 0;
3956 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
3957 m != mPendingFrameDropList.end(); m++) {
3958 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
3959 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3960 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
3961 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
3962 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
3963 frame_number, streamID);
3964 m = mPendingFrameDropList.erase(m);
3965 break;
3966 }
3967 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003968 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07003969 result.output_buffers = buffer;
3970 LOGH("result frame_number = %d, buffer = %p",
3971 frame_number, buffer->buffer);
3972
3973 mPendingBuffersMap.removeBuf(buffer->buffer);
3974
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003975 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003976 } else {
3977 if (i->input_buffer) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003978 if (i->input_buffer->release_fence != -1) {
3979 int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
3980 close(i->input_buffer->release_fence);
3981 if (rc != OK) {
3982 LOGE("input buffer sync wait failed %d", rc);
3983 }
3984 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003985 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003986
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003987 // Put buffer into the pending request
3988 for (auto &requestedBuffer : i->buffers) {
3989 if (requestedBuffer.stream == buffer->stream) {
3990 if (requestedBuffer.buffer != nullptr) {
3991 LOGE("Error: buffer is already set");
3992 } else {
3993 requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
3994 sizeof(camera3_stream_buffer_t));
3995 *(requestedBuffer.buffer) = *buffer;
3996 LOGH("cache buffer %p at result frame_number %u",
3997 buffer->buffer, frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003998 }
3999 }
4000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004001
4002 if (i->input_buffer) {
4003 // For a reprocessing request, try to send out shutter callback and result metadata.
4004 handlePendingResultsWithLock(frame_number, nullptr);
4005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004006 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004007
4008 if (mPreviewStarted == false) {
4009 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4010 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004011 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4012
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004013 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4014 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4015 mPreviewStarted = true;
4016
4017 // Set power hint for preview
4018 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4019 }
4020 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004021}
4022
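/*===========================================================================
 * FUNCTION   : handlePendingResultsWithLock
 *
 * DESCRIPTION: Attaches the given result metadata to the pending request with
 *              the given frame number, then sends out shutter callbacks and
 *              capture results for every pending request that has become ready,
 *              preserving frame number order. Called with mMutex held.
 *
 * PARAMETERS :
 *   @frameNumber   : frame number the result metadata belongs to
 *   @resultMetadata: result metadata for the request (may be NULL)
 *
 * RETURN     :
 *
 *==========================================================================*/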
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004023void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
4024 const camera_metadata_t *resultMetadata)
4025{
4026 // Find the pending request for this result metadata.
4027 auto requestIter = mPendingRequestsList.begin();
4028 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4029 requestIter++;
4030 }
4031
4032 if (requestIter == mPendingRequestsList.end()) {
4033 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4034 return;
4035 }
4036
4037 // Update the result metadata
4038 requestIter->resultMetadata = resultMetadata;
4039
4040 // Check what type of request this is.
4041 bool liveRequest = false;
4042 if (requestIter->hdrplus) {
4043 // HDR+ request doesn't have partial results.
4044 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4045 } else if (requestIter->input_buffer != nullptr) {
4046 // Reprocessing request result is the same as settings.
4047 requestIter->resultMetadata = requestIter->settings;
4048 // Reprocessing request doesn't have partial results.
4049 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4050 } else {
4051 liveRequest = true;
4052 requestIter->partial_result_cnt++;
4053 mPendingLiveRequest--;
4054
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004055 {
4056 Mutex::Autolock l(gHdrPlusClientLock);
4057 // For a live request, send the metadata to HDR+ client.
4058 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4059 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4060 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4061 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004062 }
4063 }
4064
4065 // The pending requests are ordered by increasing frame numbers. The shutter callback and
4066 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
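    // For example, if requests with frame numbers 10, 11 and 12 are pending and only 11
    // and 12 already have result metadata, nothing is sent yet; once 10 gets its metadata
    // (or is reported as an error below), results for 10, 11 and 12 go out in that order.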
4067 bool readyToSend = true;
4068
4069 // Iterate through the pending requests to send out shutter callbacks and results that are
4070 // ready. Also if this result metadata belongs to a live request, notify errors for previous
4071 // live requests that don't have result metadata yet.
4072 auto iter = mPendingRequestsList.begin();
4073 while (iter != mPendingRequestsList.end()) {
4074 // Check if current pending request is ready. If it's not ready, the following pending
4075 // requests are also not ready.
4076 if (readyToSend && iter->resultMetadata == nullptr) {
4077 readyToSend = false;
4078 }
4079
4080 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4081
4082 std::vector<camera3_stream_buffer_t> outputBuffers;
4083
4084 camera3_capture_result_t result = {};
4085 result.frame_number = iter->frame_number;
4086 result.result = iter->resultMetadata;
4087 result.partial_result = iter->partial_result_cnt;
4088
4089        // If this pending request has result metadata, we may be able to send out the
4090        // shutter callback and result metadata.
4091 if (iter->resultMetadata != nullptr) {
4092 if (!readyToSend) {
4093                // If any of the previous pending requests is not ready, this pending request
4094                // is also not ready to send, in order to keep shutter callbacks and result
4095                // metadata in order.
4096 iter++;
4097 continue;
4098 }
4099
4100 // Invoke shutter callback if not yet.
4101 if (!iter->shutter_notified) {
4102 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
4103
4104                // Find the sensor timestamp in the result metadata
4105 camera_metadata_ro_entry_t entry;
4106 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
4107 ANDROID_SENSOR_TIMESTAMP, &entry);
4108 if (res != OK) {
4109 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
4110 __FUNCTION__, iter->frame_number, strerror(-res), res);
4111 } else {
4112 timestamp = entry.data.i64[0];
4113 }
4114
4115 camera3_notify_msg_t notify_msg = {};
4116 notify_msg.type = CAMERA3_MSG_SHUTTER;
4117 notify_msg.message.shutter.frame_number = iter->frame_number;
4118 notify_msg.message.shutter.timestamp = timestamp;
4119 orchestrateNotify(&notify_msg);
4120 iter->shutter_notified = true;
4121 }
4122
4123 result.input_buffer = iter->input_buffer;
4124
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004125 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
4126 // If the result metadata belongs to a live request, notify errors for previous pending
4127 // live requests.
4128 mPendingLiveRequest--;
4129
4130 CameraMetadata dummyMetadata;
4131 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4132 result.result = dummyMetadata.release();
4133
4134 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004135
4136            // partial_result should be PARTIAL_RESULT_COUNT in case of
4137            // ERROR_RESULT.
4138 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4139 result.partial_result = PARTIAL_RESULT_COUNT;
4140
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004141 } else {
4142 iter++;
4143 continue;
4144 }
4145
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004146 // Prepare output buffer array
4147 for (auto bufferInfoIter = iter->buffers.begin();
4148 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
4149 if (bufferInfoIter->buffer != nullptr) {
4150
4151 QCamera3Channel *channel =
4152 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
4153 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4154
4155 // Check if this buffer is a dropped frame.
4156 auto frameDropIter = mPendingFrameDropList.begin();
4157 while (frameDropIter != mPendingFrameDropList.end()) {
4158 if((frameDropIter->stream_ID == streamID) &&
4159 (frameDropIter->frame_number == frameNumber)) {
4160 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
4161 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
4162 streamID);
4163 mPendingFrameDropList.erase(frameDropIter);
4164 break;
4165 } else {
4166 frameDropIter++;
4167 }
4168 }
4169
4170 // Check buffer error status
4171 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
4172 bufferInfoIter->buffer->buffer);
4173 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
4174
4175 outputBuffers.push_back(*(bufferInfoIter->buffer));
4176 free(bufferInfoIter->buffer);
4177 bufferInfoIter->buffer = NULL;
4178 }
4179 }
4180
4181 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
4182 result.num_output_buffers = outputBuffers.size();
4183
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004184 orchestrateResult(&result);
4185
4186 // For reprocessing, result metadata is the same as settings so do not free it here to
4187 // avoid double free.
4188 if (result.result != iter->settings) {
4189 free_camera_metadata((camera_metadata_t *)result.result);
4190 }
4191 iter->resultMetadata = nullptr;
4192 iter = erasePendingRequest(iter);
4193 }
4194
4195 if (liveRequest) {
4196 for (auto &iter : mPendingRequestsList) {
4197 // Increment pipeline depth for the following pending requests.
4198 if (iter.frame_number > frameNumber) {
4199 iter.pipeline_depth++;
4200 }
4201 }
4202 }
4203
4204 unblockRequestIfNecessary();
4205}
4206
Thierry Strudel3d639192016-09-09 11:52:26 -07004207/*===========================================================================
4208 * FUNCTION : unblockRequestIfNecessary
4209 *
4210 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4211 * that mMutex is held when this function is called.
4212 *
4213 * PARAMETERS :
4214 *
4215 * RETURN :
4216 *
4217 *==========================================================================*/
4218void QCamera3HardwareInterface::unblockRequestIfNecessary()
4219{
4220 // Unblock process_capture_request
4221 pthread_cond_signal(&mRequestCond);
4222}
4223
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004224/*===========================================================================
4225 * FUNCTION : isHdrSnapshotRequest
4226 *
4227 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4228 *
4229 * PARAMETERS : camera3 request structure
4230 *
4231 * RETURN : boolean decision variable
4232 *
4233 *==========================================================================*/
4234bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4235{
4236 if (request == NULL) {
4237 LOGE("Invalid request handle");
4238 assert(0);
4239 return false;
4240 }
4241
4242 if (!mForceHdrSnapshot) {
4243 CameraMetadata frame_settings;
4244 frame_settings = request->settings;
4245
4246 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4247 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4248 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4249 return false;
4250 }
4251 } else {
4252 return false;
4253 }
4254
4255 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4256 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4257 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4258 return false;
4259 }
4260 } else {
4261 return false;
4262 }
4263 }
4264
4265 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4266 if (request->output_buffers[i].stream->format
4267 == HAL_PIXEL_FORMAT_BLOB) {
4268 return true;
4269 }
4270 }
4271
4272 return false;
4273}
4274/*===========================================================================
4275 * FUNCTION : orchestrateRequest
4276 *
4277 * DESCRIPTION: Orchestrates a capture request from camera service
4278 *
4279 * PARAMETERS :
4280 * @request : request from framework to process
4281 *
4282 * RETURN : Error status codes
4283 *
4284 *==========================================================================*/
4285int32_t QCamera3HardwareInterface::orchestrateRequest(
4286 camera3_capture_request_t *request)
4287{
4288
4289 uint32_t originalFrameNumber = request->frame_number;
4290 uint32_t originalOutputCount = request->num_output_buffers;
4291 const camera_metadata_t *original_settings = request->settings;
4292 List<InternalRequest> internallyRequestedStreams;
4293 List<InternalRequest> emptyInternalList;
4294
4295 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4296 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4297 uint32_t internalFrameNumber;
4298 CameraMetadata modified_meta;
4299
4300
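        // Rough outline of the burst issued below: AE is locked, exposure compensation is
        // stepped through the -2x, 0x and +2x values, and internal metering-only or
        // metadata-only captures are interleaved with the single capture that carries the
        // framework's output buffers; only that capture is mapped back to the original
        // framework frame number through _orchestrationDb.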
4301 /* Add Blob channel to list of internally requested streams */
4302 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4303 if (request->output_buffers[i].stream->format
4304 == HAL_PIXEL_FORMAT_BLOB) {
4305 InternalRequest streamRequested;
4306 streamRequested.meteringOnly = 1;
4307 streamRequested.need_metadata = 0;
4308 streamRequested.stream = request->output_buffers[i].stream;
4309 internallyRequestedStreams.push_back(streamRequested);
4310 }
4311 }
4312 request->num_output_buffers = 0;
4313 auto itr = internallyRequestedStreams.begin();
4314
4315 /* Modify setting to set compensation */
4316 modified_meta = request->settings;
4317 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4318 uint8_t aeLock = 1;
4319 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4320 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4321 camera_metadata_t *modified_settings = modified_meta.release();
4322 request->settings = modified_settings;
4323
4324 /* Capture Settling & -2x frame */
4325 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4326 request->frame_number = internalFrameNumber;
4327 processCaptureRequest(request, internallyRequestedStreams);
4328
4329 request->num_output_buffers = originalOutputCount;
4330 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4331 request->frame_number = internalFrameNumber;
4332 processCaptureRequest(request, emptyInternalList);
4333 request->num_output_buffers = 0;
4334
4335 modified_meta = modified_settings;
4336 expCompensation = 0;
4337 aeLock = 1;
4338 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4339 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4340 modified_settings = modified_meta.release();
4341 request->settings = modified_settings;
4342
4343 /* Capture Settling & 0X frame */
4344
4345 itr = internallyRequestedStreams.begin();
4346 if (itr == internallyRequestedStreams.end()) {
4347 LOGE("Error Internally Requested Stream list is empty");
4348 assert(0);
4349 } else {
4350 itr->need_metadata = 0;
4351 itr->meteringOnly = 1;
4352 }
4353
4354 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4355 request->frame_number = internalFrameNumber;
4356 processCaptureRequest(request, internallyRequestedStreams);
4357
4358 itr = internallyRequestedStreams.begin();
4359 if (itr == internallyRequestedStreams.end()) {
4360 ALOGE("Error Internally Requested Stream list is empty");
4361 assert(0);
4362 } else {
4363 itr->need_metadata = 1;
4364 itr->meteringOnly = 0;
4365 }
4366
4367 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4368 request->frame_number = internalFrameNumber;
4369 processCaptureRequest(request, internallyRequestedStreams);
4370
4371 /* Capture 2X frame*/
4372 modified_meta = modified_settings;
4373 expCompensation = GB_HDR_2X_STEP_EV;
4374 aeLock = 1;
4375 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4376 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4377 modified_settings = modified_meta.release();
4378 request->settings = modified_settings;
4379
4380 itr = internallyRequestedStreams.begin();
4381 if (itr == internallyRequestedStreams.end()) {
4382 ALOGE("Error Internally Requested Stream list is empty");
4383 assert(0);
4384 } else {
4385 itr->need_metadata = 0;
4386 itr->meteringOnly = 1;
4387 }
4388 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4389 request->frame_number = internalFrameNumber;
4390 processCaptureRequest(request, internallyRequestedStreams);
4391
4392 itr = internallyRequestedStreams.begin();
4393 if (itr == internallyRequestedStreams.end()) {
4394 ALOGE("Error Internally Requested Stream list is empty");
4395 assert(0);
4396 } else {
4397 itr->need_metadata = 1;
4398 itr->meteringOnly = 0;
4399 }
4400
4401 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4402 request->frame_number = internalFrameNumber;
4403 processCaptureRequest(request, internallyRequestedStreams);
4404
4405
4406 /* Capture 2X on original streaming config*/
4407 internallyRequestedStreams.clear();
4408
4409 /* Restore original settings pointer */
4410 request->settings = original_settings;
4411 } else {
4412 uint32_t internalFrameNumber;
4413 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4414 request->frame_number = internalFrameNumber;
4415 return processCaptureRequest(request, internallyRequestedStreams);
4416 }
4417
4418 return NO_ERROR;
4419}
4420
4421/*===========================================================================
4422 * FUNCTION : orchestrateResult
4423 *
4424 * DESCRIPTION: Orchestrates a capture result to camera service
4425 *
4426 * PARAMETERS :
4427 * @result : capture result to be sent to camera service
4428 *
4429 * RETURN :
4430 *
4431 *==========================================================================*/
4432void QCamera3HardwareInterface::orchestrateResult(
4433 camera3_capture_result_t *result)
4434{
4435 uint32_t frameworkFrameNumber;
4436 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4437 frameworkFrameNumber);
4438 if (rc != NO_ERROR) {
4439 LOGE("Cannot find translated frameworkFrameNumber");
4440 assert(0);
4441 } else {
4442 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004443 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004444 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004445 if (result->result != NULL) {
4446 CameraMetadata metadata;
4447 metadata.acquire((camera_metadata_t *)result->result);
4448 if (metadata.exists(ANDROID_SYNC_FRAME_NUMBER)) {
4449 int64_t sync_frame_number = frameworkFrameNumber;
4450 metadata.update(ANDROID_SYNC_FRAME_NUMBER, &sync_frame_number, 1);
4451 }
4452 result->result = metadata.release();
4453 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004454 result->frame_number = frameworkFrameNumber;
4455 mCallbackOps->process_capture_result(mCallbackOps, result);
4456 }
4457 }
4458}
4459
4460/*===========================================================================
4461 * FUNCTION : orchestrateNotify
4462 *
4463 * DESCRIPTION: Orchestrates a notify to camera service
4464 *
4465 * PARAMETERS :
4466 * @notify_msg : notify message to be sent to camera service
4467 *
4468 * RETURN :
4469 *
4470 *==========================================================================*/
4471void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4472{
4473 uint32_t frameworkFrameNumber;
4474 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004475 int32_t rc = NO_ERROR;
4476
4477 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004478 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004479
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004480 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004481 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4482 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4483 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004484 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004485 LOGE("Cannot find translated frameworkFrameNumber");
4486 assert(0);
4487 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004488 }
4489 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004490
4491 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4492 LOGD("Internal Request drop the notifyCb");
4493 } else {
4494 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4495 mCallbackOps->notify(mCallbackOps, notify_msg);
4496 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004497}
4498
4499/*===========================================================================
4500 * FUNCTION : FrameNumberRegistry
4501 *
4502 * DESCRIPTION: Constructor
4503 *
4504 * PARAMETERS :
4505 *
4506 * RETURN :
4507 *
4508 *==========================================================================*/
4509FrameNumberRegistry::FrameNumberRegistry()
4510{
4511 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4512}
4513
4514/*===========================================================================
4515 * FUNCTION : ~FrameNumberRegistry
4516 *
4517 * DESCRIPTION: Destructor
4518 *
4519 * PARAMETERS :
4520 *
4521 * RETURN :
4522 *
4523 *==========================================================================*/
4524FrameNumberRegistry::~FrameNumberRegistry()
4525{
4526}
4527
4528/*===========================================================================
4529 * FUNCTION   : purgeOldEntriesLocked
4530 *
4531 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4532 *
4533 * PARAMETERS :
4534 *
4535 * RETURN : NONE
4536 *
4537 *==========================================================================*/
4538void FrameNumberRegistry::purgeOldEntriesLocked()
4539{
4540 while (_register.begin() != _register.end()) {
4541 auto itr = _register.begin();
4542 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4543 _register.erase(itr);
4544 } else {
4545 return;
4546 }
4547 }
4548}
4549
4550/*===========================================================================
4551 * FUNCTION : allocStoreInternalFrameNumber
4552 *
4553 * DESCRIPTION: Method to record a framework request and associate a new
4554 *              internal frame number with it
4555 *
4556 * PARAMETERS :
4557 * @frameworkFrameNumber: Identifier given by the framework
4558 * @internalFrameNumber : Output parameter which will hold the newly generated
4559 *                        internal frame number
4560 *
4561 * RETURN : Error code
4562 *
4563 *==========================================================================*/
4564int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4565 uint32_t &internalFrameNumber)
4566{
4567 Mutex::Autolock lock(mRegistryLock);
4568 internalFrameNumber = _nextFreeInternalNumber++;
4569 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4570 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4571 purgeOldEntriesLocked();
4572 return NO_ERROR;
4573}
4574
4575/*===========================================================================
4576 * FUNCTION : generateStoreInternalFrameNumber
4577 *
4578 * DESCRIPTION: Method to generate a new internal frame number that is not
4579 *              associated with any framework request
4580 *
4581 * PARAMETERS :
4582 * @internalFrameNumber: Output parameter which will hold the newly generated
4583 *                       internal frame number
4584 *
4585 * RETURN : Error code
4586 *
4587 *==========================================================================*/
4588int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4589{
4590 Mutex::Autolock lock(mRegistryLock);
4591 internalFrameNumber = _nextFreeInternalNumber++;
4592 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4593 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4594 purgeOldEntriesLocked();
4595 return NO_ERROR;
4596}
4597
4598/*===========================================================================
4599 * FUNCTION : getFrameworkFrameNumber
4600 *
4601 * DESCRIPTION: Method to query the framework frame number given an internal one
4602 *
4603 * PARAMETERS :
4604 * @internalFrameNumber: Internal reference
4605 * @frameworkFrameNumber: Output parameter holding the framework frame number
4606 *
4607 * RETURN : Error code
4608 *
4609 *==========================================================================*/
4610int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4611 uint32_t &frameworkFrameNumber)
4612{
4613 Mutex::Autolock lock(mRegistryLock);
4614 auto itr = _register.find(internalFrameNumber);
4615 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004616 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004617 return -ENOENT;
4618 }
4619
4620 frameworkFrameNumber = itr->second;
4621 purgeOldEntriesLocked();
4622 return NO_ERROR;
4623}
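/*
 * A minimal usage sketch of FrameNumberRegistry (illustrative only; the frame
 * number values below are hypothetical):
 *
 *   uint32_t internalFN;
 *   // Map framework frame number 42 to a fresh internal frame number.
 *   _orchestrationDb.allocStoreInternalFrameNumber(42, internalFN);
 *
 *   // Later, translate back when reporting results to the framework.
 *   uint32_t frameworkFN;
 *   if (_orchestrationDb.getFrameworkFrameNumber(internalFN, frameworkFN) == NO_ERROR) {
 *       // frameworkFN == 42; EMPTY_FRAMEWORK_FRAME_NUMBER would indicate a purely
 *       // internal request created via generateStoreInternalFrameNumber().
 *   }
 */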
Thierry Strudel3d639192016-09-09 11:52:26 -07004624
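/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills a pbcamera (HDR+ client) stream configuration from the
 *              stream info of the given channel's stream.
 *
 * PARAMETERS :
 *   @config        : output stream configuration to be filled
 *   @pbStreamId    : stream ID to assign in the configuration
 *   @pbStreamFormat: pbcamera pixel format of the stream
 *   @channel       : channel that owns the stream
 *   @streamIndex   : index of the stream within the channel
 *
 * RETURN     : OK on success
 *              BAD_VALUE or NAME_NOT_FOUND on failure
 *
 *==========================================================================*/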
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004625status_t QCamera3HardwareInterface::fillPbStreamConfig(
4626 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4627 QCamera3Channel *channel, uint32_t streamIndex) {
4628 if (config == nullptr) {
4629 LOGE("%s: config is null", __FUNCTION__);
4630 return BAD_VALUE;
4631 }
4632
4633 if (channel == nullptr) {
4634 LOGE("%s: channel is null", __FUNCTION__);
4635 return BAD_VALUE;
4636 }
4637
4638 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4639 if (stream == nullptr) {
4640 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4641 return NAME_NOT_FOUND;
4642 }
4643
4644 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4645 if (streamInfo == nullptr) {
4646 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4647 return NAME_NOT_FOUND;
4648 }
4649
4650 config->id = pbStreamId;
4651 config->image.width = streamInfo->dim.width;
4652 config->image.height = streamInfo->dim.height;
4653 config->image.padding = 0;
4654 config->image.format = pbStreamFormat;
4655
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004656 uint32_t totalPlaneSize = 0;
4657
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004658 // Fill plane information.
4659 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4660 pbcamera::PlaneConfiguration plane;
4661 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4662 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4663 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004664
4665 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004666 }
4667
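    // The padding reported to the HDR+ client is the part of the frame length that is not
    // covered by the plane data itself. For example (purely illustrative numbers), a
    // frame_len of 3153920 bytes with planes totalling 3110400 bytes yields 43520 bytes of
    // padding.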
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004668 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004669 return OK;
4670}
4671
Thierry Strudel3d639192016-09-09 11:52:26 -07004672/*===========================================================================
4673 * FUNCTION : processCaptureRequest
4674 *
4675 * DESCRIPTION: process a capture request from camera service
4676 *
4677 * PARAMETERS :
4678 * @request : request from framework to process
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004684 camera3_capture_request_t *request,
4685 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004686{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004687 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004688 int rc = NO_ERROR;
4689 int32_t request_id;
4690 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004691 bool isVidBufRequested = false;
4692 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004693 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004694
4695 pthread_mutex_lock(&mMutex);
4696
4697 // Validate current state
4698 switch (mState) {
4699 case CONFIGURED:
4700 case STARTED:
4701 /* valid state */
4702 break;
4703
4704 case ERROR:
4705 pthread_mutex_unlock(&mMutex);
4706 handleCameraDeviceError();
4707 return -ENODEV;
4708
4709 default:
4710 LOGE("Invalid state %d", mState);
4711 pthread_mutex_unlock(&mMutex);
4712 return -ENODEV;
4713 }
4714
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004715 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004716 if (rc != NO_ERROR) {
4717 LOGE("incoming request is not valid");
4718 pthread_mutex_unlock(&mMutex);
4719 return rc;
4720 }
4721
4722 meta = request->settings;
4723
4724 // For first capture request, send capture intent, and
4725 // stream on all streams
4726 if (mState == CONFIGURED) {
4727 // send an unconfigure to the backend so that the isp
4728 // resources are deallocated
4729 if (!mFirstConfiguration) {
4730 cam_stream_size_info_t stream_config_info;
4731 int32_t hal_version = CAM_HAL_V3;
4732 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4733 stream_config_info.buffer_info.min_buffers =
4734 MIN_INFLIGHT_REQUESTS;
4735 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004736 m_bIs4KVideo ? 0 :
4737 m_bEis3PropertyEnabled ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004738 clear_metadata_buffer(mParameters);
4739 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4740 CAM_INTF_PARM_HAL_VERSION, hal_version);
4741 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4742 CAM_INTF_META_STREAM_INFO, stream_config_info);
4743 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4744 mParameters);
4745 if (rc < 0) {
4746 LOGE("set_parms for unconfigure failed");
4747 pthread_mutex_unlock(&mMutex);
4748 return rc;
4749 }
4750 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004751 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004752 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004753 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004754 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004755 property_get("persist.camera.is_type", is_type_value, "4");
4756 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4757 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4758 property_get("persist.camera.is_type_preview", is_type_value, "4");
4759 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4760 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004761
4762 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4763 int32_t hal_version = CAM_HAL_V3;
4764 uint8_t captureIntent =
4765 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4766 mCaptureIntent = captureIntent;
4767 clear_metadata_buffer(mParameters);
4768 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4769 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4770 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004771 if (mFirstConfiguration) {
4772 // configure instant AEC
4773 // Instant AEC is a session based parameter and it is needed only
4774 // once per complete session after open camera.
4775 // i.e. This is set only once for the first capture request, after open camera.
4776 setInstantAEC(meta);
4777 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004778 uint8_t fwkVideoStabMode=0;
4779 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4780 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4781 }
4782
Xue Tuecac74e2017-04-17 13:58:15 -07004783        // Turn on EIS for video/preview streams only if the EIS setprop is enabled
4784 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004785 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004786 int32_t vsMode;
4787 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4788 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4789 rc = BAD_VALUE;
4790 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004791 LOGD("setEis %d", setEis);
4792 bool eis3Supported = false;
4793 size_t count = IS_TYPE_MAX;
4794 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4795 for (size_t i = 0; i < count; i++) {
4796 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4797 eis3Supported = true;
4798 break;
4799 }
4800 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004801
4802        //IS type will be IS_TYPE_NONE unless EIS is supported. If EIS is supported,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004803        //it could be either EIS 2.0 or EIS 3.0 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4805 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004806 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4807 is_type = isTypePreview;
4808 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4809 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4810 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004811 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004812 } else {
4813 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004814 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004815 } else {
4816 is_type = IS_TYPE_NONE;
4817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004818 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004819 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004820 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4821 }
4822 }
4823
4824 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4825 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4826
Thierry Strudel54dc9782017-02-15 12:12:10 -08004827 //Disable tintless only if the property is set to 0
4828 memset(prop, 0, sizeof(prop));
4829 property_get("persist.camera.tintless.enable", prop, "1");
4830 int32_t tintless_value = atoi(prop);
4831
Thierry Strudel3d639192016-09-09 11:52:26 -07004832 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4833 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 //Disable CDS for HFR mode or if DIS/EIS is on.
4836        //CDS is a session parameter in the backend/ISP, so it needs to be set/reset
4837 //after every configure_stream
4838 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4839 (m_bIsVideo)) {
4840 int32_t cds = CAM_CDS_MODE_OFF;
4841 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4842 CAM_INTF_PARM_CDS_MODE, cds))
4843 LOGE("Failed to disable CDS for HFR mode");
4844
4845 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004846
4847 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4848 uint8_t* use_av_timer = NULL;
4849
4850 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004851 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004852 use_av_timer = &m_debug_avtimer;
4853 }
4854 else{
4855 use_av_timer =
4856 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004857 if (use_av_timer) {
4858 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4859 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004860 }
4861
4862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4863 rc = BAD_VALUE;
4864 }
4865 }
4866
Thierry Strudel3d639192016-09-09 11:52:26 -07004867 setMobicat();
4868
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004869 uint8_t nrMode = 0;
4870 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
4871 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
4872 }
4873
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 /* Set fps and hfr mode while sending meta stream info so that sensor
4875 * can configure appropriate streaming mode */
4876 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004877 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4878 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004879 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4880 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004881 if (rc == NO_ERROR) {
4882 int32_t max_fps =
4883 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004884 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004885 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4886 }
4887 /* For HFR, more buffers are dequeued upfront to improve the performance */
4888 if (mBatchSize) {
4889 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4890 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4891 }
4892 }
4893 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004894 LOGE("setHalFpsRange failed");
4895 }
4896 }
4897 if (meta.exists(ANDROID_CONTROL_MODE)) {
4898 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4899 rc = extractSceneMode(meta, metaMode, mParameters);
4900 if (rc != NO_ERROR) {
4901 LOGE("extractSceneMode failed");
4902 }
4903 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004904 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004905
Thierry Strudel04e026f2016-10-10 11:27:36 -07004906 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4907 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4908 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4909 rc = setVideoHdrMode(mParameters, vhdr);
4910 if (rc != NO_ERROR) {
4911 LOGE("setVideoHDR is failed");
4912 }
4913 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004914
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07004915 if (meta.exists(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV)) {
4916 uint8_t sensorModeFullFov =
4917 meta.find(NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV).data.u8[0];
4918 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
4919 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
4920 sensorModeFullFov)) {
4921 rc = BAD_VALUE;
4922 }
4923 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004924 //TODO: validate the arguments, HSV scenemode should have only the
4925 //advertised fps ranges
4926
4927        /*set the capture intent, hal version, tintless, stream info,
4928         *and DIS enable parameters to the backend*/
4929 LOGD("set_parms META_STREAM_INFO " );
4930 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004931 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
4932 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004933 mStreamConfigInfo.type[i],
4934 mStreamConfigInfo.stream_sizes[i].width,
4935 mStreamConfigInfo.stream_sizes[i].height,
4936 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004937 mStreamConfigInfo.format[i],
4938 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004939 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004940
Thierry Strudel3d639192016-09-09 11:52:26 -07004941 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4942 mParameters);
4943 if (rc < 0) {
4944 LOGE("set_parms failed for hal version, stream info");
4945 }
4946
Chien-Yu Chenee335912017-02-09 17:53:20 -08004947 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
4948 rc = getSensorModeInfo(mSensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07004949 if (rc != NO_ERROR) {
4950 LOGE("Failed to get sensor output size");
4951 pthread_mutex_unlock(&mMutex);
4952 goto error_exit;
4953 }
4954
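        // Update the crop region mapper so that crop regions defined on the full active
        // pixel array (framework coordinates) can be translated into the active array of
        // the currently selected sensor mode.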
4955 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4956 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chenee335912017-02-09 17:53:20 -08004957 mSensorModeInfo.active_array_size.width,
4958 mSensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004959
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004960 {
4961 Mutex::Autolock l(gHdrPlusClientLock);
4962 if (EaselManagerClientOpened) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004963 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004964 rc = gEaselManagerClient.startMipi(mCameraId, mSensorModeInfo.op_pixel_clk);
4965 if (rc != OK) {
4966 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
4967 mCameraId, mSensorModeInfo.op_pixel_clk);
4968 pthread_mutex_unlock(&mMutex);
4969 goto error_exit;
4970 }
Chien-Yu Chen27ec9622017-02-23 13:39:41 -08004971 }
4972 }
4973
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 /* Set batchmode before initializing channel. Since registerBuffer
4975 * internally initializes some of the channels, better set batchmode
4976 * even before first register buffer */
4977 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4978 it != mStreamInfo.end(); it++) {
4979 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4980 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4981 && mBatchSize) {
4982 rc = channel->setBatchSize(mBatchSize);
4983 //Disable per frame map unmap for HFR/batchmode case
4984 rc |= channel->setPerFrameMapUnmap(false);
4985 if (NO_ERROR != rc) {
4986 LOGE("Channel init failed %d", rc);
4987 pthread_mutex_unlock(&mMutex);
4988 goto error_exit;
4989 }
4990 }
4991 }
4992
4993 //First initialize all streams
4994 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4995 it != mStreamInfo.end(); it++) {
4996 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01004997
4998 /* Initial value of NR mode is needed before stream on */
4999 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005000 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5001 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005002 setEis) {
5003 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5004 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5005 is_type = mStreamConfigInfo.is_type[i];
5006 break;
5007 }
5008 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005010 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005011 rc = channel->initialize(IS_TYPE_NONE);
5012 }
5013 if (NO_ERROR != rc) {
5014 LOGE("Channel initialization failed %d", rc);
5015 pthread_mutex_unlock(&mMutex);
5016 goto error_exit;
5017 }
5018 }
5019
5020 if (mRawDumpChannel) {
5021 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5022 if (rc != NO_ERROR) {
5023 LOGE("Error: Raw Dump Channel init failed");
5024 pthread_mutex_unlock(&mMutex);
5025 goto error_exit;
5026 }
5027 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005028 if (mHdrPlusRawSrcChannel) {
5029 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5030 if (rc != NO_ERROR) {
5031 LOGE("Error: HDR+ RAW Source Channel init failed");
5032 pthread_mutex_unlock(&mMutex);
5033 goto error_exit;
5034 }
5035 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 if (mSupportChannel) {
5037 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5038 if (rc < 0) {
5039 LOGE("Support channel initialization failed");
5040 pthread_mutex_unlock(&mMutex);
5041 goto error_exit;
5042 }
5043 }
5044 if (mAnalysisChannel) {
5045 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5046 if (rc < 0) {
5047 LOGE("Analysis channel initialization failed");
5048 pthread_mutex_unlock(&mMutex);
5049 goto error_exit;
5050 }
5051 }
5052 if (mDummyBatchChannel) {
5053 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5054 if (rc < 0) {
5055 LOGE("mDummyBatchChannel setBatchSize failed");
5056 pthread_mutex_unlock(&mMutex);
5057 goto error_exit;
5058 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005059 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005060 if (rc < 0) {
5061 LOGE("mDummyBatchChannel initialization failed");
5062 pthread_mutex_unlock(&mMutex);
5063 goto error_exit;
5064 }
5065 }
5066
5067 // Set bundle info
5068 rc = setBundleInfo();
5069 if (rc < 0) {
5070 LOGE("setBundleInfo failed %d", rc);
5071 pthread_mutex_unlock(&mMutex);
5072 goto error_exit;
5073 }
5074
5075 //update settings from app here
5076 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5077 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5078 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5079 }
5080 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5081 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5082 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5083 }
5084 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5085 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5086 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5087
5088 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5089 (mLinkedCameraId != mCameraId) ) {
5090 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5091 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005092 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005093 goto error_exit;
5094 }
5095 }
5096
5097 // add bundle related cameras
5098 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5099 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005100 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5101 &m_pDualCamCmdPtr->bundle_info;
5102 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 if (mIsDeviceLinked)
5104 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5105 else
5106 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5107
5108 pthread_mutex_lock(&gCamLock);
5109
5110 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5111 LOGE("Dualcam: Invalid Session Id ");
5112 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005113 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005114 goto error_exit;
5115 }
5116
5117 if (mIsMainCamera == 1) {
5118 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5119 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005120 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005121 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005122 // related session id should be session id of linked session
5123 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5124 } else {
5125 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5126 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005127 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005128 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5130 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005131 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005132 pthread_mutex_unlock(&gCamLock);
5133
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005134 rc = mCameraHandle->ops->set_dual_cam_cmd(
5135 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005136 if (rc < 0) {
5137 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005138 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005139 goto error_exit;
5140 }
5141 }
5142
5143 //Then start them.
5144 LOGH("Start META Channel");
5145 rc = mMetadataChannel->start();
5146 if (rc < 0) {
5147 LOGE("META channel start failed");
5148 pthread_mutex_unlock(&mMutex);
5149 goto error_exit;
5150 }
5151
5152 if (mAnalysisChannel) {
5153 rc = mAnalysisChannel->start();
5154 if (rc < 0) {
5155 LOGE("Analysis channel start failed");
5156 mMetadataChannel->stop();
5157 pthread_mutex_unlock(&mMutex);
5158 goto error_exit;
5159 }
5160 }
5161
5162 if (mSupportChannel) {
5163 rc = mSupportChannel->start();
5164 if (rc < 0) {
5165 LOGE("Support channel start failed");
5166 mMetadataChannel->stop();
5167            /* Although support and analysis are mutually exclusive today,
5168               adding it in any case for future proofing */
5169 if (mAnalysisChannel) {
5170 mAnalysisChannel->stop();
5171 }
5172 pthread_mutex_unlock(&mMutex);
5173 goto error_exit;
5174 }
5175 }
5176 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5177 it != mStreamInfo.end(); it++) {
5178 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5179 LOGH("Start Processing Channel mask=%d",
5180 channel->getStreamTypeMask());
5181 rc = channel->start();
5182 if (rc < 0) {
5183 LOGE("channel start failed");
5184 pthread_mutex_unlock(&mMutex);
5185 goto error_exit;
5186 }
5187 }
5188
5189 if (mRawDumpChannel) {
5190 LOGD("Starting raw dump stream");
5191 rc = mRawDumpChannel->start();
5192 if (rc != NO_ERROR) {
5193 LOGE("Error Starting Raw Dump Channel");
5194 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5195 it != mStreamInfo.end(); it++) {
5196 QCamera3Channel *channel =
5197 (QCamera3Channel *)(*it)->stream->priv;
5198 LOGH("Stopping Processing Channel mask=%d",
5199 channel->getStreamTypeMask());
5200 channel->stop();
5201 }
5202 if (mSupportChannel)
5203 mSupportChannel->stop();
5204 if (mAnalysisChannel) {
5205 mAnalysisChannel->stop();
5206 }
5207 mMetadataChannel->stop();
5208 pthread_mutex_unlock(&mMutex);
5209 goto error_exit;
5210 }
5211 }
5212
5213 if (mChannelHandle) {
5214
5215 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5216 mChannelHandle);
5217 if (rc != NO_ERROR) {
5218 LOGE("start_channel failed %d", rc);
5219 pthread_mutex_unlock(&mMutex);
5220 goto error_exit;
5221 }
5222 }
5223
5224 goto no_error;
5225error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 return rc;
5228no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005229 mWokenUpByDaemon = false;
5230 mPendingLiveRequest = 0;
5231 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005232 }
5233
Chien-Yu Chenee335912017-02-09 17:53:20 -08005234 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005235 {
5236 Mutex::Autolock l(gHdrPlusClientLock);
5237 if (gEaselManagerClient.isEaselPresentOnDevice() &&
5238 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
5239 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
5240 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
5241 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
5242 rc = enableHdrPlusModeLocked();
Chien-Yu Chenee335912017-02-09 17:53:20 -08005243 if (rc != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005244 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -08005245 pthread_mutex_unlock(&mMutex);
5246 return rc;
5247 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005248
5249 mFirstPreviewIntentSeen = true;
Chien-Yu Chenee335912017-02-09 17:53:20 -08005250 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08005251 }
5252
Thierry Strudel3d639192016-09-09 11:52:26 -07005253 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005254 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005255
5256 if (mFlushPerf) {
5257 //we cannot accept any requests during flush
5258 LOGE("process_capture_request cannot proceed during flush");
5259 pthread_mutex_unlock(&mMutex);
5260 return NO_ERROR; //should return an error
5261 }
5262
5263 if (meta.exists(ANDROID_REQUEST_ID)) {
5264 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5265 mCurrentRequestId = request_id;
5266 LOGD("Received request with id: %d", request_id);
5267 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5268 LOGE("Unable to find request id field, \
5269 & no previous id available");
5270 pthread_mutex_unlock(&mMutex);
5271 return NAME_NOT_FOUND;
5272 } else {
5273 LOGD("Re-using old request id");
5274 request_id = mCurrentRequestId;
5275 }
5276
5277 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5278 request->num_output_buffers,
5279 request->input_buffer,
5280 frameNumber);
5281 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005282 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005283 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005284 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005285 uint32_t snapshotStreamId = 0;
5286 for (size_t i = 0; i < request->num_output_buffers; i++) {
5287 const camera3_stream_buffer_t& output = request->output_buffers[i];
5288 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5289
Emilian Peev7650c122017-01-19 08:24:33 -08005290 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5291 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005292 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 blob_request = 1;
5294 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5295 }
5296
5297 if (output.acquire_fence != -1) {
5298 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5299 close(output.acquire_fence);
5300 if (rc != OK) {
5301 LOGE("sync wait failed %d", rc);
5302 pthread_mutex_unlock(&mMutex);
5303 return rc;
5304 }
5305 }
5306
Emilian Peev0f3c3162017-03-15 12:57:46 +00005307 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5308 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005309 depthRequestPresent = true;
5310 continue;
5311 }
5312
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005313 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005314 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005315
5316 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5317 isVidBufRequested = true;
5318 }
5319 }
5320
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005321    //FIXME: Add checks to ensure no dups in validateCaptureRequest
5322 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5323 itr++) {
5324 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5325 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5326 channel->getStreamID(channel->getStreamTypeMask());
5327
5328 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5329 isVidBufRequested = true;
5330 }
5331 }
5332
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005334 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 }
5337 if (blob_request && mRawDumpChannel) {
5338 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005339 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005340 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005341 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005342 }
5343
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005344 {
5345 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5346 // Request a RAW buffer if
5347 // 1. mHdrPlusRawSrcChannel is valid.
5348 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5349 // 3. There is no pending HDR+ request.
5350 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5351 mHdrPlusPendingRequests.size() == 0) {
5352 streamsArray.stream_request[streamsArray.num_streams].streamID =
5353 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5354 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5355 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005356 }
5357
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005358 //extract capture intent
5359 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5360 mCaptureIntent =
5361 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5362 }
5363
5364 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5365 mCacMode =
5366 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5367 }
5368
5369 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005370 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005371
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005372 {
5373 Mutex::Autolock l(gHdrPlusClientLock);
5374 // If this request has a still capture intent, try to submit an HDR+ request.
5375 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5376 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5377 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5378 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005379 }
5380
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005381 if (hdrPlusRequest) {
5382 // For a HDR+ request, just set the frame parameters.
5383 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5384 if (rc < 0) {
5385 LOGE("fail to set frame parameters");
5386 pthread_mutex_unlock(&mMutex);
5387 return rc;
5388 }
5389 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005390 /* Parse the settings:
5391 * - For every request in NORMAL MODE
5392 * - For every request in HFR mode during preview only case
5393 * - For first request of every batch in HFR mode during video
5394 * recording. In batchmode the same settings except frame number is
5395 * repeated in each request of the batch.
5396 */
5397 if (!mBatchSize ||
5398 (mBatchSize && !isVidBufRequested) ||
5399 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005400 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005401 if (rc < 0) {
5402 LOGE("fail to set frame parameters");
5403 pthread_mutex_unlock(&mMutex);
5404 return rc;
5405 }
5406 }
5407        /* For batchMode HFR, setFrameParameters is not called for every
5408         * request; only the frame number of the latest request is parsed.
5409         * Keep track of the first and last frame numbers in a batch so that
5410         * metadata for all the frame numbers of the batch can be duplicated
5411         * in handleBatchMetadata */
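        /* Example (illustrative): with mBatchSize = 4, a batch covering frame
         * numbers 100..103 keeps mFirstFrameNumberInBatch at 100, and
         * handleBatchMetadata later duplicates the batch metadata for frame
         * numbers 100 through 103. */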
5412 if (mBatchSize) {
5413 if (!mToBeQueuedVidBufs) {
5414 //start of the batch
5415 mFirstFrameNumberInBatch = request->frame_number;
5416 }
5417 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5418 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5419 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005420 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005421 return BAD_VALUE;
5422 }
5423 }
5424 if (mNeedSensorRestart) {
5425 /* Unlock the mutex as restartSensor waits on the channels to be
5426 * stopped, which in turn calls stream callback functions -
5427 * handleBufferWithLock and handleMetadataWithLock */
5428 pthread_mutex_unlock(&mMutex);
5429 rc = dynamicUpdateMetaStreamInfo();
5430 if (rc != NO_ERROR) {
5431 LOGE("Restarting the sensor failed");
5432 return BAD_VALUE;
5433 }
5434 mNeedSensorRestart = false;
5435 pthread_mutex_lock(&mMutex);
5436 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005437 if(mResetInstantAEC) {
5438 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5439 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5440 mResetInstantAEC = false;
5441 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005442 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005443 if (request->input_buffer->acquire_fence != -1) {
5444 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5445 close(request->input_buffer->acquire_fence);
5446 if (rc != OK) {
5447 LOGE("input buffer sync wait failed %d", rc);
5448 pthread_mutex_unlock(&mMutex);
5449 return rc;
5450 }
5451 }
5452 }
5453
5454 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5455 mLastCustIntentFrmNum = frameNumber;
5456 }
5457 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005458 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005459 pendingRequestIterator latestRequest;
5460 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005461 pendingRequest.num_buffers = depthRequestPresent ?
5462 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 pendingRequest.request_id = request_id;
5464 pendingRequest.blob_request = blob_request;
5465 pendingRequest.timestamp = 0;
5466 pendingRequest.bUrgentReceived = 0;
5467 if (request->input_buffer) {
5468 pendingRequest.input_buffer =
5469 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5470 *(pendingRequest.input_buffer) = *(request->input_buffer);
5471 pInputBuffer = pendingRequest.input_buffer;
5472 } else {
5473 pendingRequest.input_buffer = NULL;
5474 pInputBuffer = NULL;
5475 }
5476
5477 pendingRequest.pipeline_depth = 0;
5478 pendingRequest.partial_result_cnt = 0;
5479 extractJpegMetadata(mCurJpegMeta, request);
5480 pendingRequest.jpegMetadata = mCurJpegMeta;
5481 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5482 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005483 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005484 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
5485 mHybridAeEnable =
5486 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5487 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005488
5489 // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
5490 pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
Samuel Ha68ba5172016-12-15 18:41:12 -08005491 /* DevCamDebug metadata processCaptureRequest */
5492 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5493 mDevCamDebugMetaEnable =
5494 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5495 }
5496 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5497 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005498
5499 //extract CAC info
5500 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5501 mCacMode =
5502 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5503 }
5504 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005505 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005506
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005507 // extract enableZsl info
5508 if (gExposeEnableZslKey) {
5509 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5510 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5511 mZslEnabled = pendingRequest.enableZsl;
5512 } else {
5513 pendingRequest.enableZsl = mZslEnabled;
5514 }
5515 }
5516
Thierry Strudel3d639192016-09-09 11:52:26 -07005517 PendingBuffersInRequest bufsForCurRequest;
5518 bufsForCurRequest.frame_number = frameNumber;
5519 // Mark current timestamp for the new request
5520 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005521 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005522
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005523 if (hdrPlusRequest) {
5524 // Save settings for this request.
5525 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5526 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5527
5528 // Add to pending HDR+ request queue.
5529 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5530 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5531
5532 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5533 }
5534
Thierry Strudel3d639192016-09-09 11:52:26 -07005535 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005536 if ((request->output_buffers[i].stream->data_space ==
5537 HAL_DATASPACE_DEPTH) &&
5538 (HAL_PIXEL_FORMAT_BLOB ==
5539 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005540 continue;
5541 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 RequestedBufferInfo requestedBuf;
5543 memset(&requestedBuf, 0, sizeof(requestedBuf));
5544 requestedBuf.stream = request->output_buffers[i].stream;
5545 requestedBuf.buffer = NULL;
5546 pendingRequest.buffers.push_back(requestedBuf);
5547
5548        // Add the buffer handle to the pending buffers list
5549 PendingBufferInfo bufferInfo;
5550 bufferInfo.buffer = request->output_buffers[i].buffer;
5551 bufferInfo.stream = request->output_buffers[i].stream;
5552 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5553 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5554 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5555 frameNumber, bufferInfo.buffer,
5556 channel->getStreamTypeMask(), bufferInfo.stream->format);
5557 }
5558 // Add this request packet into mPendingBuffersMap
5559 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5560 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5561 mPendingBuffersMap.get_num_overall_buffers());
5562
5563 latestRequest = mPendingRequestsList.insert(
5564 mPendingRequestsList.end(), pendingRequest);
5565 if(mFlush) {
5566 LOGI("mFlush is true");
5567 pthread_mutex_unlock(&mMutex);
5568 return NO_ERROR;
5569 }
5570
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005571 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5572 // channel.
5573 if (!hdrPlusRequest) {
5574 int indexUsed;
5575 // Notify metadata channel we receive a request
5576 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005577
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005578 if(request->input_buffer != NULL){
5579 LOGD("Input request, frame_number %d", frameNumber);
5580 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5581 if (NO_ERROR != rc) {
5582 LOGE("fail to set reproc parameters");
5583 pthread_mutex_unlock(&mMutex);
5584 return rc;
5585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005586 }
5587
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005588 // Call request on other streams
5589 uint32_t streams_need_metadata = 0;
5590 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5591 for (size_t i = 0; i < request->num_output_buffers; i++) {
5592 const camera3_stream_buffer_t& output = request->output_buffers[i];
5593 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5594
5595 if (channel == NULL) {
5596 LOGW("invalid channel pointer for stream");
5597 continue;
5598 }
5599
5600 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5601 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5602 output.buffer, request->input_buffer, frameNumber);
5603 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005604 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5606 if (rc < 0) {
5607 LOGE("Fail to request on picture channel");
5608 pthread_mutex_unlock(&mMutex);
5609 return rc;
5610 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005611 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005612 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5613 assert(NULL != mDepthChannel);
5614 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005615
Emilian Peev7650c122017-01-19 08:24:33 -08005616 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5617 if (rc < 0) {
5618 LOGE("Fail to map on depth buffer");
5619 pthread_mutex_unlock(&mMutex);
5620 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005621 }
Emilian Peev7650c122017-01-19 08:24:33 -08005622 } else {
5623 LOGD("snapshot request with buffer %p, frame_number %d",
5624 output.buffer, frameNumber);
5625 if (!request->settings) {
5626 rc = channel->request(output.buffer, frameNumber,
5627 NULL, mPrevParameters, indexUsed);
5628 } else {
5629 rc = channel->request(output.buffer, frameNumber,
5630 NULL, mParameters, indexUsed);
5631 }
5632 if (rc < 0) {
5633 LOGE("Fail to request on picture channel");
5634 pthread_mutex_unlock(&mMutex);
5635 return rc;
5636 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005637
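                        // Record the buffer index the channel assigned for this
                        // stream in streamsArray so that the backend request
                        // below refers to the same buffer.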
Emilian Peev7650c122017-01-19 08:24:33 -08005638 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5639 uint32_t j = 0;
5640 for (j = 0; j < streamsArray.num_streams; j++) {
5641 if (streamsArray.stream_request[j].streamID == streamId) {
5642 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5643 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5644 else
5645 streamsArray.stream_request[j].buf_index = indexUsed;
5646 break;
5647 }
5648 }
5649 if (j == streamsArray.num_streams) {
5650 LOGE("Did not find matching stream to update index");
5651 assert(0);
5652 }
5653
5654 pendingBufferIter->need_metadata = true;
5655 streams_need_metadata++;
5656 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005658 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5659 bool needMetadata = false;
5660 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5661 rc = yuvChannel->request(output.buffer, frameNumber,
5662 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5663 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005664 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005665 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005666 pthread_mutex_unlock(&mMutex);
5667 return rc;
5668 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005669
5670 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5671 uint32_t j = 0;
5672 for (j = 0; j < streamsArray.num_streams; j++) {
5673 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005674 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5675 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5676 else
5677 streamsArray.stream_request[j].buf_index = indexUsed;
5678 break;
5679 }
5680 }
5681 if (j == streamsArray.num_streams) {
5682 LOGE("Did not find matching stream to update index");
5683 assert(0);
5684 }
5685
5686 pendingBufferIter->need_metadata = needMetadata;
5687 if (needMetadata)
5688 streams_need_metadata += 1;
5689 LOGD("calling YUV channel request, need_metadata is %d",
5690 needMetadata);
5691 } else {
5692 LOGD("request with buffer %p, frame_number %d",
5693 output.buffer, frameNumber);
5694
5695 rc = channel->request(output.buffer, frameNumber, indexUsed);
5696
5697 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5698 uint32_t j = 0;
5699 for (j = 0; j < streamsArray.num_streams; j++) {
5700 if (streamsArray.stream_request[j].streamID == streamId) {
5701 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5702 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5703 else
5704 streamsArray.stream_request[j].buf_index = indexUsed;
5705 break;
5706 }
5707 }
5708 if (j == streamsArray.num_streams) {
5709 LOGE("Did not find matching stream to update index");
5710 assert(0);
5711 }
5712
5713 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5714 && mBatchSize) {
5715 mToBeQueuedVidBufs++;
5716 if (mToBeQueuedVidBufs == mBatchSize) {
5717 channel->queueBatchBuf();
5718 }
5719 }
5720 if (rc < 0) {
5721 LOGE("request failed");
5722 pthread_mutex_unlock(&mMutex);
5723 return rc;
5724 }
5725 }
5726 pendingBufferIter++;
5727 }
5728
5729 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5730 itr++) {
5731 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5732
5733 if (channel == NULL) {
5734 LOGE("invalid channel pointer for stream");
5735 assert(0);
5736 return BAD_VALUE;
5737 }
5738
5739 InternalRequest requestedStream;
5740 requestedStream = (*itr);
5741
5742
5743 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5744 LOGD("snapshot request internally input buffer %p, frame_number %d",
5745 request->input_buffer, frameNumber);
5746 if(request->input_buffer != NULL){
5747 rc = channel->request(NULL, frameNumber,
5748 pInputBuffer, &mReprocMeta, indexUsed, true,
5749 requestedStream.meteringOnly);
5750 if (rc < 0) {
5751 LOGE("Fail to request on picture channel");
5752 pthread_mutex_unlock(&mMutex);
5753 return rc;
5754 }
5755 } else {
5756 LOGD("snapshot request with frame_number %d", frameNumber);
5757 if (!request->settings) {
5758 rc = channel->request(NULL, frameNumber,
5759 NULL, mPrevParameters, indexUsed, true,
5760 requestedStream.meteringOnly);
5761 } else {
5762 rc = channel->request(NULL, frameNumber,
5763 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5764 }
5765 if (rc < 0) {
5766 LOGE("Fail to request on picture channel");
5767 pthread_mutex_unlock(&mMutex);
5768 return rc;
5769 }
5770
5771 if ((*itr).meteringOnly != 1) {
5772 requestedStream.need_metadata = 1;
5773 streams_need_metadata++;
5774 }
5775 }
5776
5777 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5778 uint32_t j = 0;
5779 for (j = 0; j < streamsArray.num_streams; j++) {
5780 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005781 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5782 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5783 else
5784 streamsArray.stream_request[j].buf_index = indexUsed;
5785 break;
5786 }
5787 }
5788 if (j == streamsArray.num_streams) {
5789 LOGE("Did not find matching stream to update index");
5790 assert(0);
5791 }
5792
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005793 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005794 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005795 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005796 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005797 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005798 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005799 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005800
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005801 //If 2 streams have need_metadata set to true, fail the request, unless
5802 //we copy/reference count the metadata buffer
5803 if (streams_need_metadata > 1) {
5804            LOGE("not supporting a request in which two streams require"
5805                    " HAL metadata for reprocessing");
5806 pthread_mutex_unlock(&mMutex);
5807 return -EINVAL;
5808 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005809
Emilian Peev7650c122017-01-19 08:24:33 -08005810 int32_t pdafEnable = depthRequestPresent ? 1 : 0;
5811 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5812 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5813 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5814 pthread_mutex_unlock(&mMutex);
5815 return BAD_VALUE;
5816 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005817 if (request->input_buffer == NULL) {
5818 /* Set the parameters to backend:
5819 * - For every request in NORMAL MODE
5820 * - For every request in HFR mode during preview only case
5821 * - Once every batch in HFR mode during video recording
5822 */
5823 if (!mBatchSize ||
5824 (mBatchSize && !isVidBufRequested) ||
5825 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5826 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5827 mBatchSize, isVidBufRequested,
5828 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005829
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5831 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5832 uint32_t m = 0;
5833 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5834 if (streamsArray.stream_request[k].streamID ==
5835 mBatchedStreamsArray.stream_request[m].streamID)
5836 break;
5837 }
5838 if (m == mBatchedStreamsArray.num_streams) {
5839 mBatchedStreamsArray.stream_request\
5840 [mBatchedStreamsArray.num_streams].streamID =
5841 streamsArray.stream_request[k].streamID;
5842 mBatchedStreamsArray.stream_request\
5843 [mBatchedStreamsArray.num_streams].buf_index =
5844 streamsArray.stream_request[k].buf_index;
5845 mBatchedStreamsArray.num_streams =
5846 mBatchedStreamsArray.num_streams + 1;
5847 }
5848 }
5849 streamsArray = mBatchedStreamsArray;
5850 }
5851 /* Update stream id of all the requested buffers */
5852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5853 streamsArray)) {
5854                LOGE("Failed to set stream IDs in the parameters");
                pthread_mutex_unlock(&mMutex);
5855                return BAD_VALUE;
5856 }
5857
5858 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5859 mParameters);
5860 if (rc < 0) {
5861 LOGE("set_parms failed");
5862 }
5863                /* reset to zero because the batch is queued */
5864 mToBeQueuedVidBufs = 0;
5865 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5866 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5867 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005868 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5869 uint32_t m = 0;
5870 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5871 if (streamsArray.stream_request[k].streamID ==
5872 mBatchedStreamsArray.stream_request[m].streamID)
5873 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005874 }
5875 if (m == mBatchedStreamsArray.num_streams) {
5876 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5877 streamID = streamsArray.stream_request[k].streamID;
5878 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5879 buf_index = streamsArray.stream_request[k].buf_index;
5880 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5881 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005882 }
5883 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005884 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005885 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005886 }
5887
5888 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5889
5890 mState = STARTED;
5891    // Use a timed condition wait so this call cannot block indefinitely
5892 struct timespec ts;
5893 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005894 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005895 if (rc < 0) {
5896 isValidTimeout = 0;
5897        LOGE("Error reading the monotonic clock!");
5898 }
5899 else {
5900        // Default to a 5 second timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005901 int64_t timeout = 5;
5902 {
5903 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5904 // If there is a pending HDR+ request, the following requests may be blocked until the
5905 // HDR+ request is done. So allow a longer timeout.
5906 if (mHdrPlusPendingRequests.size() > 0) {
5907 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5908 }
5909 }
5910 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005911 }
5912 //Block on conditional variable
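    // Throttle the caller: block until the number of in-flight live requests
    // drops below the allowed threshold (or an error/timeout occurs), so the
    // framework cannot queue an unbounded number of requests.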
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005913 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005914 (mState != ERROR) && (mState != DEINIT)) {
5915 if (!isValidTimeout) {
5916 LOGD("Blocking on conditional wait");
5917 pthread_cond_wait(&mRequestCond, &mMutex);
5918 }
5919 else {
5920 LOGD("Blocking on timed conditional wait");
5921 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5922 if (rc == ETIMEDOUT) {
5923 rc = -ENODEV;
5924 LOGE("Unblocked on timeout!!!!");
5925 break;
5926 }
5927 }
5928 LOGD("Unblocked");
5929 if (mWokenUpByDaemon) {
5930 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005931 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005932 break;
5933 }
5934 }
5935 pthread_mutex_unlock(&mMutex);
5936
5937 return rc;
5938}
5939
5940/*===========================================================================
5941 * FUNCTION : dump
5942 *
5943 * DESCRIPTION:
5944 *
5945 * PARAMETERS :
5946 *
5947 *
5948 * RETURN :
5949 *==========================================================================*/
5950void QCamera3HardwareInterface::dump(int fd)
5951{
5952 pthread_mutex_lock(&mMutex);
5953 dprintf(fd, "\n Camera HAL3 information Begin \n");
5954
5955 dprintf(fd, "\nNumber of pending requests: %zu \n",
5956 mPendingRequestsList.size());
5957 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5958 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5959 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5960 for(pendingRequestIterator i = mPendingRequestsList.begin();
5961 i != mPendingRequestsList.end(); i++) {
5962 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5963 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5964 i->input_buffer);
5965 }
5966 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5967 mPendingBuffersMap.get_num_overall_buffers());
5968 dprintf(fd, "-------+------------------\n");
5969 dprintf(fd, " Frame | Stream type mask \n");
5970 dprintf(fd, "-------+------------------\n");
5971 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5972 for(auto &j : req.mPendingBufferList) {
5973 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5974 dprintf(fd, " %5d | %11d \n",
5975 req.frame_number, channel->getStreamTypeMask());
5976 }
5977 }
5978 dprintf(fd, "-------+------------------\n");
5979
5980 dprintf(fd, "\nPending frame drop list: %zu\n",
5981 mPendingFrameDropList.size());
5982 dprintf(fd, "-------+-----------\n");
5983 dprintf(fd, " Frame | Stream ID \n");
5984 dprintf(fd, "-------+-----------\n");
5985 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5986 i != mPendingFrameDropList.end(); i++) {
5987 dprintf(fd, " %5d | %9d \n",
5988 i->frame_number, i->stream_ID);
5989 }
5990 dprintf(fd, "-------+-----------\n");
5991
5992 dprintf(fd, "\n Camera HAL3 information End \n");
5993
5994 /* use dumpsys media.camera as trigger to send update debug level event */
5995 mUpdateDebugLevel = true;
5996 pthread_mutex_unlock(&mMutex);
5997 return;
5998}
5999
6000/*===========================================================================
6001 * FUNCTION : flush
6002 *
6003 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6004 * conditionally restarts channels
6005 *
6006 * PARAMETERS :
6007 * @ restartChannels: re-start all channels
6008 *
6009 *
6010 * RETURN :
6011 * 0 on success
6012 * Error code on failure
6013 *==========================================================================*/
6014int QCamera3HardwareInterface::flush(bool restartChannels)
6015{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006016 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006017 int32_t rc = NO_ERROR;
6018
6019 LOGD("Unblocking Process Capture Request");
6020 pthread_mutex_lock(&mMutex);
6021 mFlush = true;
6022 pthread_mutex_unlock(&mMutex);
6023
6024 rc = stopAllChannels();
6025 // unlink of dualcam
6026 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006027 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6028 &m_pDualCamCmdPtr->bundle_info;
6029 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006030 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6031 pthread_mutex_lock(&gCamLock);
6032
6033 if (mIsMainCamera == 1) {
6034 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6035 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006036 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006037 // related session id should be session id of linked session
6038 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6039 } else {
6040 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6041 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006042 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006043 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6044 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006045 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006046 pthread_mutex_unlock(&gCamLock);
6047
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006048 rc = mCameraHandle->ops->set_dual_cam_cmd(
6049 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006050 if (rc < 0) {
6051 LOGE("Dualcam: Unlink failed, but still proceed to close");
6052 }
6053 }
6054
6055 if (rc < 0) {
6056 LOGE("stopAllChannels failed");
6057 return rc;
6058 }
6059 if (mChannelHandle) {
6060 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6061 mChannelHandle);
6062 }
6063
6064 // Reset bundle info
6065 rc = setBundleInfo();
6066 if (rc < 0) {
6067 LOGE("setBundleInfo failed %d", rc);
6068 return rc;
6069 }
6070
6071 // Mutex Lock
6072 pthread_mutex_lock(&mMutex);
6073
6074 // Unblock process_capture_request
6075 mPendingLiveRequest = 0;
6076 pthread_cond_signal(&mRequestCond);
6077
6078 rc = notifyErrorForPendingRequests();
6079 if (rc < 0) {
6080 LOGE("notifyErrorForPendingRequests failed");
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
6084
6085 mFlush = false;
6086
6087 // Start the Streams/Channels
6088 if (restartChannels) {
6089 rc = startAllChannels();
6090 if (rc < 0) {
6091 LOGE("startAllChannels failed");
6092 pthread_mutex_unlock(&mMutex);
6093 return rc;
6094 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006095 if (mChannelHandle) {
6096 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6097 mChannelHandle);
6098 if (rc < 0) {
6099 LOGE("start_channel failed");
6100 pthread_mutex_unlock(&mMutex);
6101 return rc;
6102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006103 }
6104 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006105 pthread_mutex_unlock(&mMutex);
6106
6107 return 0;
6108}
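// Usage note (illustrative): internal error recovery (handleCameraDeviceError)
// calls flush(false /* restart channels */) because the device is headed to an
// error state, while callers that expect streaming to continue afterwards pass
// restartChannels = true.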
6109
6110/*===========================================================================
6111 * FUNCTION : flushPerf
6112 *
6113 * DESCRIPTION: This is the performance optimization version of flush that does
6114 * not use stream off, rather flushes the system
6115 *
6116 * PARAMETERS :
6117 *
6118 *
6119 * RETURN : 0 : success
6120 * -EINVAL: input is malformed (device is not valid)
6121 * -ENODEV: if the device has encountered a serious error
6122 *==========================================================================*/
6123int QCamera3HardwareInterface::flushPerf()
6124{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006125 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006126 int32_t rc = 0;
6127 struct timespec timeout;
6128 bool timed_wait = false;
6129
6130 pthread_mutex_lock(&mMutex);
6131 mFlushPerf = true;
6132 mPendingBuffersMap.numPendingBufsAtFlush =
6133 mPendingBuffersMap.get_num_overall_buffers();
6134 LOGD("Calling flush. Wait for %d buffers to return",
6135 mPendingBuffersMap.numPendingBufsAtFlush);
6136
6137 /* send the flush event to the backend */
6138 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6139 if (rc < 0) {
6140 LOGE("Error in flush: IOCTL failure");
6141 mFlushPerf = false;
6142 pthread_mutex_unlock(&mMutex);
6143 return -ENODEV;
6144 }
6145
6146 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6147 LOGD("No pending buffers in HAL, return flush");
6148 mFlushPerf = false;
6149 pthread_mutex_unlock(&mMutex);
6150 return rc;
6151 }
6152
6153 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006154 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 if (rc < 0) {
6156        LOGE("Error reading the monotonic clock, cannot use timed wait");
6157 } else {
6158 timeout.tv_sec += FLUSH_TIMEOUT;
6159 timed_wait = true;
6160 }
6161
6162 //Block on conditional variable
6163 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6164 LOGD("Waiting on mBuffersCond");
6165 if (!timed_wait) {
6166 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6167 if (rc != 0) {
6168 LOGE("pthread_cond_wait failed due to rc = %s",
6169 strerror(rc));
6170 break;
6171 }
6172 } else {
6173 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6174 if (rc != 0) {
6175 LOGE("pthread_cond_timedwait failed due to rc = %s",
6176 strerror(rc));
6177 break;
6178 }
6179 }
6180 }
6181 if (rc != 0) {
6182 mFlushPerf = false;
6183 pthread_mutex_unlock(&mMutex);
6184 return -ENODEV;
6185 }
6186
6187 LOGD("Received buffers, now safe to return them");
6188
6189 //make sure the channels handle flush
6190 //currently only required for the picture channel to release snapshot resources
6191 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6192 it != mStreamInfo.end(); it++) {
6193 QCamera3Channel *channel = (*it)->channel;
6194 if (channel) {
6195 rc = channel->flush();
6196 if (rc) {
6197 LOGE("Flushing the channels failed with error %d", rc);
6198 // even though the channel flush failed we need to continue and
6199 // return the buffers we have to the framework, however the return
6200 // value will be an error
6201 rc = -ENODEV;
6202 }
6203 }
6204 }
6205
6206 /* notify the frameworks and send errored results */
6207 rc = notifyErrorForPendingRequests();
6208 if (rc < 0) {
6209 LOGE("notifyErrorForPendingRequests failed");
6210 pthread_mutex_unlock(&mMutex);
6211 return rc;
6212 }
6213
6214 //unblock process_capture_request
6215 mPendingLiveRequest = 0;
6216 unblockRequestIfNecessary();
6217
6218 mFlushPerf = false;
6219 pthread_mutex_unlock(&mMutex);
6220 LOGD ("Flush Operation complete. rc = %d", rc);
6221 return rc;
6222}
6223
6224/*===========================================================================
6225 * FUNCTION : handleCameraDeviceError
6226 *
6227 * DESCRIPTION: This function calls internal flush and notifies the error to
6228 * framework and updates the state variable.
6229 *
6230 * PARAMETERS : None
6231 *
6232 * RETURN : NO_ERROR on Success
6233 * Error code on failure
6234 *==========================================================================*/
6235int32_t QCamera3HardwareInterface::handleCameraDeviceError()
6236{
6237 int32_t rc = NO_ERROR;
6238
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006239 {
6240 Mutex::Autolock lock(mFlushLock);
6241 pthread_mutex_lock(&mMutex);
6242 if (mState != ERROR) {
6243 //if mState != ERROR, nothing to be done
6244 pthread_mutex_unlock(&mMutex);
6245 return NO_ERROR;
6246 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006247 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006248
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006249 rc = flush(false /* restart channels */);
6250 if (NO_ERROR != rc) {
6251 LOGE("internal flush to handle mState = ERROR failed");
6252 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006253
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006254 pthread_mutex_lock(&mMutex);
6255 mState = DEINIT;
6256 pthread_mutex_unlock(&mMutex);
6257 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006258
6259 camera3_notify_msg_t notify_msg;
6260 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6261 notify_msg.type = CAMERA3_MSG_ERROR;
6262 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6263 notify_msg.message.error.error_stream = NULL;
6264 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006265 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006266
6267 return rc;
6268}
6269
6270/*===========================================================================
6271 * FUNCTION : captureResultCb
6272 *
6273 * DESCRIPTION: Callback handler for all capture result
6274 * (streams, as well as metadata)
6275 *
6276 * PARAMETERS :
6277 * @metadata : metadata information
6278 * @buffer : actual gralloc buffer to be returned to frameworks.
6279 * NULL if metadata.
6280 *
6281 * RETURN : NONE
6282 *==========================================================================*/
6283void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6284 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6285{
6286 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006287 pthread_mutex_lock(&mMutex);
6288 uint8_t batchSize = mBatchSize;
6289 pthread_mutex_unlock(&mMutex);
6290 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006291 handleBatchMetadata(metadata_buf,
6292 true /* free_and_bufdone_meta_buf */);
6293 } else { /* mBatchSize = 0 */
6294 hdrPlusPerfLock(metadata_buf);
6295 pthread_mutex_lock(&mMutex);
6296 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006297 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006298 true /* last urgent frame of batch metadata */,
6299 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006300 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006301 pthread_mutex_unlock(&mMutex);
6302 }
6303 } else if (isInputBuffer) {
6304 pthread_mutex_lock(&mMutex);
6305 handleInputBufferWithLock(frame_number);
6306 pthread_mutex_unlock(&mMutex);
6307 } else {
6308 pthread_mutex_lock(&mMutex);
6309 handleBufferWithLock(buffer, frame_number);
6310 pthread_mutex_unlock(&mMutex);
6311 }
6312 return;
6313}
6314
6315/*===========================================================================
6316 * FUNCTION : getReprocessibleOutputStreamId
6317 *
6318 * DESCRIPTION: Get source output stream id for the input reprocess stream
6319 * based on size and format, which would be the largest
6320 * output stream if an input stream exists.
6321 *
6322 * PARAMETERS :
6323 * @id : return the stream id if found
6324 *
6325 * RETURN : int32_t type of status
6326 * NO_ERROR -- success
6327 *              non-zero failure code
6328 *==========================================================================*/
6329int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6330{
6331    /* Check whether any output or bidirectional stream with the same size and
6332       format exists, and return that stream */
6333 if ((mInputStreamInfo.dim.width > 0) &&
6334 (mInputStreamInfo.dim.height > 0)) {
6335 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6336 it != mStreamInfo.end(); it++) {
6337
6338 camera3_stream_t *stream = (*it)->stream;
6339 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6340 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6341 (stream->format == mInputStreamInfo.format)) {
6342 // Usage flag for an input stream and the source output stream
6343 // may be different.
6344 LOGD("Found reprocessible output stream! %p", *it);
6345 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6346 stream->usage, mInputStreamInfo.usage);
6347
6348 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6349 if (channel != NULL && channel->mStreams[0]) {
6350 id = channel->mStreams[0]->getMyServerID();
6351 return NO_ERROR;
6352 }
6353 }
6354 }
6355 } else {
6356 LOGD("No input stream, so no reprocessible output stream");
6357 }
6358 return NAME_NOT_FOUND;
6359}
6360
6361/*===========================================================================
6362 * FUNCTION : lookupFwkName
6363 *
6364 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6365 *              make sure the parameter is correctly propagated
6366 *
6367 * PARAMETERS :
6368 * @arr : map between the two enums
6369 * @len : len of the map
6370 * @hal_name : name of the hal_parm to map
6371 *
6372 * RETURN : int type of status
6373 * fwk_name -- success
6374 * none-zero failure code
6375 *              non-zero failure code
6376template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6377 size_t len, halType hal_name)
6378{
6379
6380 for (size_t i = 0; i < len; i++) {
6381 if (arr[i].hal_name == hal_name) {
6382 return arr[i].fwk_name;
6383 }
6384 }
6385
6386 /* Not able to find matching framework type is not necessarily
6387 * an error case. This happens when mm-camera supports more attributes
6388 * than the frameworks do */
6389 LOGH("Cannot find matching framework type");
6390 return NAME_NOT_FOUND;
6391}
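
// Illustrative usage of lookupFwkName() with a hypothetical map (the map and
// variable names below are examples only, not necessarily defined in this file):
//     static const QCameraMap<uint8_t, cam_effect_mode_type> kEffectMap[] = { /* ... */ };
//     int fwkEffect = lookupFwkName(kEffectMap,
//             sizeof(kEffectMap) / sizeof(kEffectMap[0]), halEffect);
//     if (fwkEffect != NAME_NOT_FOUND) {
//         // publish fwkEffect into the framework result metadata
//     }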
6392
6393/*===========================================================================
6394 * FUNCTION : lookupHalName
6395 *
6396 * DESCRIPTION: In case the enum is not the same in the framework and backend,
6397 *              make sure the parameter is correctly propagated
6398 *
6399 * PARAMETERS :
6400 * @arr : map between the two enums
6401 * @len : len of the map
6402 * @fwk_name : name of the hal_parm to map
6403 *   @fwk_name : name of the fwk_parm to map
6404 * RETURN : int32_t type of status
6405 * hal_name -- success
6406 * none-zero failure code
6407 *              non-zero failure code
6408template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6409 size_t len, fwkType fwk_name)
6410{
6411 for (size_t i = 0; i < len; i++) {
6412 if (arr[i].fwk_name == fwk_name) {
6413 return arr[i].hal_name;
6414 }
6415 }
6416
6417 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6418 return NAME_NOT_FOUND;
6419}
6420
6421/*===========================================================================
6422 * FUNCTION : lookupProp
6423 *
6424 * DESCRIPTION: lookup a value by its name
6425 *
6426 * PARAMETERS :
6427 * @arr : map between the two enums
6428 * @len : size of the map
6429 * @name : name to be looked up
6430 *
6431 * RETURN : Value if found
6432 * CAM_CDS_MODE_MAX if not found
6433 *==========================================================================*/
6434template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6435 size_t len, const char *name)
6436{
6437 if (name) {
6438 for (size_t i = 0; i < len; i++) {
6439 if (!strcmp(arr[i].desc, name)) {
6440 return arr[i].val;
6441 }
6442 }
6443 }
6444 return CAM_CDS_MODE_MAX;
6445}
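
// Illustrative usage of lookupProp() (hedged sketch; the property name and map
// below are examples only):
//     char prop[PROPERTY_VALUE_MAX];
//     memset(prop, 0, sizeof(prop));
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds =
//             lookupProp(CDS_MAP, sizeof(CDS_MAP) / sizeof(CDS_MAP[0]), prop);
//     if (cds != CAM_CDS_MODE_MAX) {
//         // apply the parsed CDS mode
//     }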
6446
6447/*===========================================================================
6448 * FUNCTION   : translateFromHalMetadata
6449 * DESCRIPTION: Translate HAL/backend metadata into framework metadata
6450 *
6451 * PARAMETERS :
6452 * @metadata : metadata information from callback
6453 * @timestamp: metadata buffer timestamp
6454 * @request_id: request id
6455 * @jpegMetadata: additional jpeg metadata
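 * @pipeline_depth: pipeline depth reported for this result (ANDROID_REQUEST_PIPELINE_DEPTH)
 * @capture_intent: capture intent of the request (ANDROID_CONTROL_CAPTURE_INTENT)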
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006456 * @hybrid_ae_enable: whether hybrid ae is enabled
Samuel Ha68ba5172016-12-15 18:41:12 -08006457 * @DevCamDebug_meta_enable: enable DevCamDebug meta
6458 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07006459 * @pprocDone: whether internal offline postprocessing is done
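 * @fwk_cacMode: framework color correction aberration mode for this request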
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006460 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6461 * in a batch. Always true for non-batch mode.
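 * @enableZsl: requested ANDROID_CONTROL_ENABLE_ZSL value, if any (may be null)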
Thierry Strudel3d639192016-09-09 11:52:26 -07006462 *
6463 * RETURN : camera_metadata_t*
6464 * metadata in a format specified by fwk
6465 *==========================================================================*/
6466camera_metadata_t*
6467QCamera3HardwareInterface::translateFromHalMetadata(
6468 metadata_buffer_t *metadata,
6469 nsecs_t timestamp,
6470 int32_t request_id,
6471 const CameraMetadata& jpegMetadata,
6472 uint8_t pipeline_depth,
6473 uint8_t capture_intent,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006474 uint8_t hybrid_ae_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08006475 /* DevCamDebug metadata translateFromHalMetadata argument */
6476 uint8_t DevCamDebug_meta_enable,
6477 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07006478 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006479 uint8_t fwk_cacMode,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006480 bool lastMetadataInBatch,
6481 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006482{
6483 CameraMetadata camMetadata;
6484 camera_metadata_t *resultMetadata;
6485
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006486 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006487 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6488 * Timestamp is needed because it's used for shutter notify calculation.
6489 * */
6490 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6491 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006492 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006493 }
6494
Thierry Strudel3d639192016-09-09 11:52:26 -07006495 if (jpegMetadata.entryCount())
6496 camMetadata.append(jpegMetadata);
6497
6498 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
6499 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
6500 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
6501 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07006502 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006503 if (mBatchSize == 0) {
6504 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6505 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6506 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006507
Samuel Ha68ba5172016-12-15 18:41:12 -08006508 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6509 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6510 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6511 // DevCamDebug metadata translateFromHalMetadata AF
6512 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6513 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6514 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6515 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6516 }
6517 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6518 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6519 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6520 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6521 }
6522 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6523 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6524 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6525 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6526 }
6527 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6528 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6529 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6530 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6531 }
6532 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6533 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6534 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6535 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6536 }
6537 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6538 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6539 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6540 *DevCamDebug_af_monitor_pdaf_target_pos;
6541 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6542 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6543 }
6544 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6545 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6546 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6547 *DevCamDebug_af_monitor_pdaf_confidence;
6548 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6549 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6550 }
6551 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6552 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6553 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6554 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6555 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6556 }
6557 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6558 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6559 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6560 *DevCamDebug_af_monitor_tof_target_pos;
6561 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6562 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6563 }
6564 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6565 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6566 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6567 *DevCamDebug_af_monitor_tof_confidence;
6568 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6569 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6570 }
6571 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6572 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6573 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6574 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6575 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6576 }
6577 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6578 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6579 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6580 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6581 &fwk_DevCamDebug_af_monitor_type_select, 1);
6582 }
6583 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6584 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6585 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6586 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6587 &fwk_DevCamDebug_af_monitor_refocus, 1);
6588 }
6589 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6590 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6591 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6592 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6593 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6594 }
6595 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6596 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6597 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6598 *DevCamDebug_af_search_pdaf_target_pos;
6599 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6600 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6601 }
6602 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6603 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6604 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6605 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6606 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6607 }
6608 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6609 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6610 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6611 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6612 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6613 }
6614 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6615 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6616 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6617 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6618 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6619 }
6620 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6621 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6622 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6623 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6624 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6625 }
6626 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6627 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6628 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6629 *DevCamDebug_af_search_tof_target_pos;
6630 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6631 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6632 }
6633 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6634 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6635 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6636 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6637 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6638 }
6639 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6640 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6641 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6642 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6643 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6644 }
6645 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6646 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6647 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6648 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6649 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6650 }
6651 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6652 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6653 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6654 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6655 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6656 }
6657 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6658 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6659 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6660 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6661 &fwk_DevCamDebug_af_search_type_select, 1);
6662 }
6663 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6664 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6665 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6666 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6667 &fwk_DevCamDebug_af_search_next_pos, 1);
6668 }
6669 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6670 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6671 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6672 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6673 &fwk_DevCamDebug_af_search_target_pos, 1);
6674 }
6675 // DevCamDebug metadata translateFromHalMetadata AEC
6676 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6677 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6678 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6679 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6680 }
6681 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6682 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6683 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6684 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6685 }
6686 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6687 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6688 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6689 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6690 }
6691 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6692 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6693 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6694 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6695 }
6696 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6697 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6698 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6699 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6700 }
6701 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6702 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6703 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6704 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6705 }
6706 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6707 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6708 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6709 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6710 }
6711 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6712 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6713 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6714 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6715 }
Samuel Ha34229982017-02-17 13:51:11 -08006716 // DevCamDebug metadata translateFromHalMetadata zzHDR
6717 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6718 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6719 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6720 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6721 }
6722 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6723 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006724 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006725 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6726 }
6727 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6728 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6729 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6730 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6731 }
6732 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6733 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006734 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006735 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6736 }
6737 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6738 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6739 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6740 *DevCamDebug_aec_hdr_sensitivity_ratio;
6741 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6742 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6743 }
6744 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6745 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6746 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6747 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6748 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6749 }
6750 // DevCamDebug metadata translateFromHalMetadata ADRC
6751 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6752 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6753 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6754 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6755 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6756 }
6757 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6758 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6759 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6760 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6761 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6762 }
6763 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6764 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6765 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6766 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6767 }
6768 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6769 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6770 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6771 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6772 }
6773 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
6774 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
6775 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
6776 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
6777 }
6778 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
6779 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
6780 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
6781 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
6782 }
Samuel Ha68ba5172016-12-15 18:41:12 -08006783 // DevCamDebug metadata translateFromHalMetadata AWB
6784 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6785 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6786 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6787 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6788 }
6789 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6790 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6791 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6792 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6793 }
6794 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6795 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6796 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6797 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6798 }
6799 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6800 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6801 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6802 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6805 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6806 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6807 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6808 }
6809 }
6810 // atrace_end(ATRACE_TAG_ALWAYS);
6811
Thierry Strudel3d639192016-09-09 11:52:26 -07006812 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6813 int64_t fwk_frame_number = *frame_number;
6814 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6815 }
6816
6817 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6818 int32_t fps_range[2];
6819 fps_range[0] = (int32_t)float_range->min_fps;
6820 fps_range[1] = (int32_t)float_range->max_fps;
6821 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6822 fps_range, 2);
6823 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6824 fps_range[0], fps_range[1]);
6825 }
6826
6827 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6828 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6829 }
6830
6831 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6832 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6833 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6834 *sceneMode);
6835 if (NAME_NOT_FOUND != val) {
6836 uint8_t fwkSceneMode = (uint8_t)val;
6837 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6838 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6839 fwkSceneMode);
6840 }
6841 }
6842
6843 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6844 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6845 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6846 }
6847
6848 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6849 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6850 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6851 }
6852
6853 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6854 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6855 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6856 }
6857
6858 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6859 CAM_INTF_META_EDGE_MODE, metadata) {
6860 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6861 }
6862
6863 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6864 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6865 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6866 }
6867
6868 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6869 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6870 }
6871
6872 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6873 if (0 <= *flashState) {
6874 uint8_t fwk_flashState = (uint8_t) *flashState;
6875 if (!gCamCapability[mCameraId]->flash_available) {
6876 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6877 }
6878 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6879 }
6880 }
6881
6882 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6883 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6884 if (NAME_NOT_FOUND != val) {
6885 uint8_t fwk_flashMode = (uint8_t)val;
6886 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6887 }
6888 }
6889
6890 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6891 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6892 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6893 }
6894
6895 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6896 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6897 }
6898
6899 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6900 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6901 }
6902
6903 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6904 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6905 }
6906
6907 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6908 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6909 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6910 }
6911
6912 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6913 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6914 LOGD("fwk_videoStab = %d", fwk_videoStab);
6915 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6916 } else {
 6917         // Regardless of whether video stabilization is supported, CTS expects the EIS result
 6918         // to be non-NULL, so hardcode the video stabilization result to OFF mode.
6919 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6920 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006921         LOGD("EIS result defaults to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006922 }
6923
6924 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6925 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6926 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6927 }
6928
6929 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6930 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6931 }
6932
Thierry Strudel3d639192016-09-09 11:52:26 -07006933 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6934 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006935 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006936
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006937 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006939
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006940 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006941 blackLevelAppliedPattern->cam_black_level[0],
6942 blackLevelAppliedPattern->cam_black_level[1],
6943 blackLevelAppliedPattern->cam_black_level[2],
6944 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006945 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6946 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006947
6948#ifndef USE_HAL_3_3
6949 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05306950         // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07006951 // depth space.
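        // Worked example (illustrative): a dynamic black level of 1024 on the internal
        // 14-bit scale becomes 1024 / 16 = 64 on the 10-bit sensor raw scale, since dropping
        // 4 bits of depth corresponds to dividing by 2^4 = 16.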
Jason Lee4f3d96e2017-02-28 19:24:14 +05306952 fwk_blackLevelInd[0] /= 16.0;
6953 fwk_blackLevelInd[1] /= 16.0;
6954 fwk_blackLevelInd[2] /= 16.0;
6955 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006956 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6957 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006958#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006959 }
6960
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006961#ifndef USE_HAL_3_3
6962 // Fixed whitelevel is used by ISP/Sensor
6963 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6964 &gCamCapability[mCameraId]->white_level, 1);
6965#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006966
6967 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6968 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6969 int32_t scalerCropRegion[4];
6970 scalerCropRegion[0] = hScalerCropRegion->left;
6971 scalerCropRegion[1] = hScalerCropRegion->top;
6972 scalerCropRegion[2] = hScalerCropRegion->width;
6973 scalerCropRegion[3] = hScalerCropRegion->height;
6974
6975 // Adjust crop region from sensor output coordinate system to active
6976 // array coordinate system.
6977 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6978 scalerCropRegion[2], scalerCropRegion[3]);
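        // Illustrative example (assumed numbers): with a 2104x1560 sensor output mode on a
        // 4208x3120 active array (2x binning), a crop of (100, 100, 1000, 750) in sensor
        // output coordinates maps to roughly (200, 200, 2000, 1500) in active array
        // coordinates; mCropRegionMapper encapsulates the exact scaling and offset math.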
6979
6980 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6981 }
6982
6983 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6984 LOGD("sensorExpTime = %lld", *sensorExpTime);
6985 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6986 }
6987
 6988     IF_META_AVAILABLE(int64_t, sensorFrameDuration,
 6989             CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
 6990         LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
 6991         camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
6992 }
6993
6994 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6995 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6996 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6997 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6998 sensorRollingShutterSkew, 1);
6999 }
7000
7001 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7002 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7003 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7004
7005 //calculate the noise profile based on sensitivity
7006 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7007 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7008 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7009 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7010 noise_profile[i] = noise_profile_S;
7011 noise_profile[i+1] = noise_profile_O;
7012 }
7013 LOGD("noise model entry (S, O) is (%f, %f)",
7014 noise_profile_S, noise_profile_O);
7015 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7016 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7017 }
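    // Note on ANDROID_SENSOR_NOISE_PROFILE: each color channel is described by a pair (S, O)
    // such that the pixel noise variance is approximately S * x + O for a signal level x, so
    // the array above repeats the same (S, O) pair once per color channel.
    // A minimal sketch of the interleaving, assuming 4 color channels:
    //     double np[8];
    //     for (int c = 0; c < 4; c++) { np[2 * c] = noise_profile_S; np[2 * c + 1] = noise_profile_O; }
    //     // np == { S, O, S, O, S, O, S, O }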
7018
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007019#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007020 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007021 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007022 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007023 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007024 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7025 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7026 }
7027 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007028#endif
7029
Thierry Strudel3d639192016-09-09 11:52:26 -07007030 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7031 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7032 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7033 }
7034
7035 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7036 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7037 *faceDetectMode);
7038 if (NAME_NOT_FOUND != val) {
7039 uint8_t fwk_faceDetectMode = (uint8_t)val;
7040 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7041
7042 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7043 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7044 CAM_INTF_META_FACE_DETECTION, metadata) {
7045 uint8_t numFaces = MIN(
7046 faceDetectionInfo->num_faces_detected, MAX_ROI);
7047 int32_t faceIds[MAX_ROI];
7048 uint8_t faceScores[MAX_ROI];
7049 int32_t faceRectangles[MAX_ROI * 4];
7050 int32_t faceLandmarks[MAX_ROI * 6];
7051 size_t j = 0, k = 0;
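                    // Packing convention used below: faceRectangles holds 4 int32s per face
                    // (left, top, right, bottom after convertToRegions), indexed by j, while
                    // faceLandmarks holds 6 int32s per face (left-eye, right-eye and mouth
                    // center x/y pairs), indexed by k in steps of TOTAL_LANDMARK_INDICES.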
7052
7053 for (size_t i = 0; i < numFaces; i++) {
7054 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7055 // Adjust crop region from sensor output coordinate system to active
7056 // array coordinate system.
7057 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
7058 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7059 rect.width, rect.height);
7060
7061 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
7062 faceRectangles+j, -1);
7063
Jason Lee8ce36fa2017-04-19 19:40:37 -07007064 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7065 "bottom-right (%d, %d)",
7066 faceDetectionInfo->frame_id, i,
7067 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7068 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7069
Thierry Strudel3d639192016-09-09 11:52:26 -07007070 j+= 4;
7071 }
7072 if (numFaces <= 0) {
7073 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7074 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7075 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7076 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7077 }
7078
7079 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7080 numFaces);
7081 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7082 faceRectangles, numFaces * 4U);
7083 if (fwk_faceDetectMode ==
7084 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7085 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7086 CAM_INTF_META_FACE_LANDMARK, metadata) {
7087
7088 for (size_t i = 0; i < numFaces; i++) {
7089 // Map the co-ordinate sensor output coordinate system to active
7090 // array coordinate system.
7091 mCropRegionMapper.toActiveArray(
7092 landmarks->face_landmarks[i].left_eye_center.x,
7093 landmarks->face_landmarks[i].left_eye_center.y);
7094 mCropRegionMapper.toActiveArray(
7095 landmarks->face_landmarks[i].right_eye_center.x,
7096 landmarks->face_landmarks[i].right_eye_center.y);
7097 mCropRegionMapper.toActiveArray(
7098 landmarks->face_landmarks[i].mouth_center.x,
7099 landmarks->face_landmarks[i].mouth_center.y);
7100
7101 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007102
7103 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7104 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7105 faceDetectionInfo->frame_id, i,
7106 faceLandmarks[k + LEFT_EYE_X],
7107 faceLandmarks[k + LEFT_EYE_Y],
7108 faceLandmarks[k + RIGHT_EYE_X],
7109 faceLandmarks[k + RIGHT_EYE_Y],
7110 faceLandmarks[k + MOUTH_X],
7111 faceLandmarks[k + MOUTH_Y]);
7112
Thierry Strudel04e026f2016-10-10 11:27:36 -07007113 k+= TOTAL_LANDMARK_INDICES;
7114 }
7115 } else {
7116 for (size_t i = 0; i < numFaces; i++) {
7117 setInvalidLandmarks(faceLandmarks+k);
7118 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007119 }
7120 }
7121
Jason Lee49619db2017-04-13 12:07:22 -07007122 for (size_t i = 0; i < numFaces; i++) {
7123 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7124
7125 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7126 faceDetectionInfo->frame_id, i, faceIds[i]);
7127 }
7128
Thierry Strudel3d639192016-09-09 11:52:26 -07007129 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7130 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7131 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007132 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007133 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7134 CAM_INTF_META_FACE_BLINK, metadata) {
7135 uint8_t detected[MAX_ROI];
7136 uint8_t degree[MAX_ROI * 2];
7137 for (size_t i = 0; i < numFaces; i++) {
7138 detected[i] = blinks->blink[i].blink_detected;
7139 degree[2 * i] = blinks->blink[i].left_blink;
7140 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007141
Jason Lee49619db2017-04-13 12:07:22 -07007142 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7143 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7144 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7145 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007146 }
7147 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7148 detected, numFaces);
7149 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7150 degree, numFaces * 2);
7151 }
7152 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7153 CAM_INTF_META_FACE_SMILE, metadata) {
7154 uint8_t degree[MAX_ROI];
7155 uint8_t confidence[MAX_ROI];
7156 for (size_t i = 0; i < numFaces; i++) {
7157 degree[i] = smiles->smile[i].smile_degree;
7158 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007159
Jason Lee49619db2017-04-13 12:07:22 -07007160 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7161 "smile_degree=%d, smile_score=%d",
7162 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007163 }
7164 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7165 degree, numFaces);
7166 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7167 confidence, numFaces);
7168 }
7169 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7170 CAM_INTF_META_FACE_GAZE, metadata) {
7171 int8_t angle[MAX_ROI];
7172 int32_t direction[MAX_ROI * 3];
7173 int8_t degree[MAX_ROI * 2];
7174 for (size_t i = 0; i < numFaces; i++) {
7175 angle[i] = gazes->gaze[i].gaze_angle;
7176 direction[3 * i] = gazes->gaze[i].updown_dir;
7177 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7178 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7179 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7180 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007181
7182 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7183 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7184 "left_right_gaze=%d, top_bottom_gaze=%d",
7185 faceDetectionInfo->frame_id, i, angle[i],
7186 direction[3 * i], direction[3 * i + 1],
7187 direction[3 * i + 2],
7188 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007189 }
7190 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7191 (uint8_t *)angle, numFaces);
7192 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7193 direction, numFaces * 3);
7194 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7195 (uint8_t *)degree, numFaces * 2);
7196 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007197 }
7198 }
7199 }
7200 }
7201
7202 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7203 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007204 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007205 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007207
Shuzhen Wang14415f52016-11-16 18:26:18 -08007208 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7209 histogramBins = *histBins;
7210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7211 }
7212
7213 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007214 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7215 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007216 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007217
7218 switch (stats_data->type) {
7219 case CAM_HISTOGRAM_TYPE_BAYER:
7220 switch (stats_data->bayer_stats.data_type) {
7221 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007222 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7223 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007224 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007225 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7226 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007227 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007228 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7229 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007230 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007231 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007232 case CAM_STATS_CHANNEL_R:
7233 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007234 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7235 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007236 }
7237 break;
7238 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007239 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007240 break;
7241 }
7242
Shuzhen Wang14415f52016-11-16 18:26:18 -08007243 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007244 }
7245 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007246 }
7247
7248 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7249 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7250 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7251 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7252 }
7253
7254 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7255 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7256 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7257 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7258 }
7259
7260 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7261 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7262 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7263 CAM_MAX_SHADING_MAP_HEIGHT);
7264 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7265 CAM_MAX_SHADING_MAP_WIDTH);
7266 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7267 lensShadingMap->lens_shading, 4U * map_width * map_height);
7268 }
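    // The lens shading map carries 4 gain samples per grid cell (one per Bayer channel),
    // hence the 4U * map_width * map_height float count published for
    // ANDROID_STATISTICS_LENS_SHADING_MAP.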
7269
7270 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7271 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7272 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7273 }
7274
7275 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7276 //Populate CAM_INTF_META_TONEMAP_CURVES
7277 /* ch0 = G, ch 1 = B, ch 2 = R*/
7278 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7279 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7280 tonemap->tonemap_points_cnt,
7281 CAM_MAX_TONEMAP_CURVE_SIZE);
7282 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7283 }
7284
7285 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7286 &tonemap->curves[0].tonemap_points[0][0],
7287 tonemap->tonemap_points_cnt * 2);
7288
7289 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7290 &tonemap->curves[1].tonemap_points[0][0],
7291 tonemap->tonemap_points_cnt * 2);
7292
7293 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7294 &tonemap->curves[2].tonemap_points[0][0],
7295 tonemap->tonemap_points_cnt * 2);
7296 }
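    // Each tonemap curve is published as tonemap_points_cnt (Pin, Pout) pairs, which is why
    // the update count is tonemap_points_cnt * 2 floats per channel; the HAL channel order
    // noted above (ch0 = G, ch1 = B, ch2 = R) is remapped to the per-color Android tags.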
7297
7298 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7299 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7300 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7301 CC_GAIN_MAX);
7302 }
7303
7304 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7305 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7306 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7307 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7308 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7309 }
7310
7311 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7312 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7313 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7314 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7315 toneCurve->tonemap_points_cnt,
7316 CAM_MAX_TONEMAP_CURVE_SIZE);
7317 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7318 }
7319 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7320 (float*)toneCurve->curve.tonemap_points,
7321 toneCurve->tonemap_points_cnt * 2);
7322 }
7323
7324 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7325 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7326 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7327 predColorCorrectionGains->gains, 4);
7328 }
7329
7330 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7331 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7332 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7333 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7334 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7335 }
7336
7337 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7338 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7339 }
7340
7341 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7342 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7343 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7344 }
7345
7346 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7347 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7348 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7349 }
7350
7351 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7352 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7353 *effectMode);
7354 if (NAME_NOT_FOUND != val) {
7355 uint8_t fwk_effectMode = (uint8_t)val;
7356 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7357 }
7358 }
7359
7360 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7361 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7362 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7363 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7364 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7365 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7366 }
7367 int32_t fwk_testPatternData[4];
7368 fwk_testPatternData[0] = testPatternData->r;
7369 fwk_testPatternData[3] = testPatternData->b;
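        // The framework's test pattern data array fixes R at index 0 and B at index 3; which
        // of the HAL's Gr/Gb values lands at index 1 versus index 2 depends on the sensor's
        // color filter arrangement, hence the switch below.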
7370 switch (gCamCapability[mCameraId]->color_arrangement) {
7371 case CAM_FILTER_ARRANGEMENT_RGGB:
7372 case CAM_FILTER_ARRANGEMENT_GRBG:
7373 fwk_testPatternData[1] = testPatternData->gr;
7374 fwk_testPatternData[2] = testPatternData->gb;
7375 break;
7376 case CAM_FILTER_ARRANGEMENT_GBRG:
7377 case CAM_FILTER_ARRANGEMENT_BGGR:
7378 fwk_testPatternData[2] = testPatternData->gr;
7379 fwk_testPatternData[1] = testPatternData->gb;
7380 break;
7381 default:
7382 LOGE("color arrangement %d is not supported",
7383 gCamCapability[mCameraId]->color_arrangement);
7384 break;
7385 }
7386 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7387 }
7388
7389 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7390 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7391 }
7392
7393 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7394 String8 str((const char *)gps_methods);
7395 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7396 }
7397
7398 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7399 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7400 }
7401
7402 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7403 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7404 }
7405
7406 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7407 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7408 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7409 }
7410
7411 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7412 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7413 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7414 }
7415
7416 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7417 int32_t fwk_thumb_size[2];
7418 fwk_thumb_size[0] = thumb_size->width;
7419 fwk_thumb_size[1] = thumb_size->height;
7420 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7421 }
7422
7423 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7424 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7425 privateData,
7426 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7427 }
7428
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007429 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007430 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007431 meteringMode, 1);
7432 }
7433
Thierry Strudel54dc9782017-02-15 12:12:10 -08007434 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7435 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7436 LOGD("hdr_scene_data: %d %f\n",
7437 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7438 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7439 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7440 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7441 &isHdr, 1);
7442 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7443 &isHdrConfidence, 1);
7444 }
7445
7446
7447
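    // Layout of the QCAMERA3_TUNING_META_DATA_BLOB assembled below: a header of six uint32
    // fields (tuning data version, then sensor / VFE / CPP / CAC / mod3 section sizes, with
    // mod3 forced to zero), followed by the sensor, VFE, CPP and CAC payloads, each clamped
    // to its TUNING_*_DATA_MAX bound. A hypothetical view of the header, for reference only:
    //     struct TuningBlobHeader {
    //         uint32_t version;
    //         uint32_t sensor_size, vfe_size, cpp_size, cac_size, mod3_size;
    //     };
    // The blob is published as an int32 array whose count is the assembled byte length
    // divided by sizeof(uint32_t).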
Thierry Strudel3d639192016-09-09 11:52:26 -07007448 if (metadata->is_tuning_params_valid) {
7449 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7450 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7451 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7452
7453
7454 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7455 sizeof(uint32_t));
7456 data += sizeof(uint32_t);
7457
7458 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7459 sizeof(uint32_t));
7460 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7461 data += sizeof(uint32_t);
7462
7463 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7464 sizeof(uint32_t));
7465 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7466 data += sizeof(uint32_t);
7467
7468 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7469 sizeof(uint32_t));
7470 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7471 data += sizeof(uint32_t);
7472
7473 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7474 sizeof(uint32_t));
7475 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7476 data += sizeof(uint32_t);
7477
7478 metadata->tuning_params.tuning_mod3_data_size = 0;
7479 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7480 sizeof(uint32_t));
7481 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7482 data += sizeof(uint32_t);
7483
7484 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7485 TUNING_SENSOR_DATA_MAX);
7486 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7487 count);
7488 data += count;
7489
7490 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7491 TUNING_VFE_DATA_MAX);
7492 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7493 count);
7494 data += count;
7495
7496 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7497 TUNING_CPP_DATA_MAX);
7498 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7499 count);
7500 data += count;
7501
7502 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7503 TUNING_CAC_DATA_MAX);
7504 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7505 count);
7506 data += count;
7507
7508 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7509 (int32_t *)(void *)tuning_meta_data_blob,
7510 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7511 }
7512
7513 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7514 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7515 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7516 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7517 NEUTRAL_COL_POINTS);
7518 }
7519
7520 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7521 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7522 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7523 }
7524
7525 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7526 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7527 // Adjust crop region from sensor output coordinate system to active
7528 // array coordinate system.
7529 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
7530 hAeRegions->rect.width, hAeRegions->rect.height);
7531
7532 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
7533 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7534 REGIONS_TUPLE_COUNT);
7535 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7536 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
7537 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
7538 hAeRegions->rect.height);
7539 }
7540
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007541 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
7542 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
7543 if (NAME_NOT_FOUND != val) {
7544 uint8_t fwkAfMode = (uint8_t)val;
7545 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
7546 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
7547 } else {
7548 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
7549 val);
7550 }
7551 }
7552
Thierry Strudel3d639192016-09-09 11:52:26 -07007553 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7554 uint8_t fwk_afState = (uint8_t) *afState;
7555 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007556 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07007557 }
7558
7559 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7560 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7561 }
7562
7563 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7564 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7565 }
7566
7567 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7568 uint8_t fwk_lensState = *lensState;
7569 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7570 }
7571
Thierry Strudel3d639192016-09-09 11:52:26 -07007572
7573 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007574 uint32_t ab_mode = *hal_ab_mode;
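        // The HAL may report 50Hz/60Hz-biased auto antibanding modes, but the framework's
        // ANDROID_CONTROL_AE_ANTIBANDING_MODE enum only defines OFF/50HZ/60HZ/AUTO, so both
        // auto variants are collapsed to plain AUTO before the lookup below.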
7575 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7576 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7577 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7578 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007579 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007580 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007581 if (NAME_NOT_FOUND != val) {
7582 uint8_t fwk_ab_mode = (uint8_t)val;
7583 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7584 }
7585 }
7586
7587 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7588 int val = lookupFwkName(SCENE_MODES_MAP,
7589 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7590 if (NAME_NOT_FOUND != val) {
7591 uint8_t fwkBestshotMode = (uint8_t)val;
7592 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7593 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7594 } else {
7595 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7596 }
7597 }
7598
7599 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7600 uint8_t fwk_mode = (uint8_t) *mode;
7601 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7602 }
7603
 7604     /* Constant metadata values to be updated */
7605 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7606 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7607
7608 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7609 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7610
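    // Hot pixel map mode is reported as OFF above, so the hot pixel map entry is published
    // with a count of zero; the array contents are never consumed.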
7611 int32_t hotPixelMap[2];
7612 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7613
7614 // CDS
7615 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7616 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7617 }
7618
Thierry Strudel04e026f2016-10-10 11:27:36 -07007619 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7620 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007621 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007622 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7623 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7624 } else {
7625 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7626 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007627
7628 if(fwk_hdr != curr_hdr_state) {
7629 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7630 if(fwk_hdr)
7631 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7632 else
7633 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7634 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007635 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7636 }
7637
Thierry Strudel54dc9782017-02-15 12:12:10 -08007638 //binning correction
7639 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7640 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7641 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7642 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7643 }
7644
Thierry Strudel04e026f2016-10-10 11:27:36 -07007645 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007646 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007647 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7648 int8_t is_ir_on = 0;
7649
 7650         is_ir_on = (fwk_ir > 0) ? 1 : 0;
7651 if(is_ir_on != curr_ir_state) {
7652 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7653 if(is_ir_on)
7654 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7655 else
7656 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7657 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007658 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007659 }
7660
Thierry Strudel269c81a2016-10-12 12:13:59 -07007661 // AEC SPEED
7662 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7663 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7664 }
7665
7666 // AWB SPEED
7667 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7668 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7669 }
7670
Thierry Strudel3d639192016-09-09 11:52:26 -07007671 // TNR
7672 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7673 uint8_t tnr_enable = tnr->denoise_enable;
7674 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007675 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7676 int8_t is_tnr_on = 0;
7677
 7678         is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7679 if(is_tnr_on != curr_tnr_state) {
7680 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7681 if(is_tnr_on)
7682 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7683 else
7684 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7685 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007686
7687 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7688 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7689 }
7690
7691 // Reprocess crop data
7692 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7693 uint8_t cnt = crop_data->num_of_streams;
7694 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
 7695             // mm-qcamera-daemon only posts crop_data for streams
 7696             // not linked to pproc, so the absence of valid crop metadata
 7697             // is not necessarily an error.
7698 LOGD("No valid crop metadata entries");
7699 } else {
7700 uint32_t reproc_stream_id;
7701 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7702 LOGD("No reprocessible stream found, ignore crop data");
7703 } else {
7704 int rc = NO_ERROR;
7705 Vector<int32_t> roi_map;
7706 int32_t *crop = new int32_t[cnt*4];
7707 if (NULL == crop) {
7708 rc = NO_MEMORY;
7709 }
7710 if (NO_ERROR == rc) {
7711 int32_t streams_found = 0;
7712 for (size_t i = 0; i < cnt; i++) {
7713 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7714 if (pprocDone) {
7715 // HAL already does internal reprocessing,
7716 // either via reprocessing before JPEG encoding,
7717 // or offline postprocessing for pproc bypass case.
7718 crop[0] = 0;
7719 crop[1] = 0;
7720 crop[2] = mInputStreamInfo.dim.width;
7721 crop[3] = mInputStreamInfo.dim.height;
7722 } else {
7723 crop[0] = crop_data->crop_info[i].crop.left;
7724 crop[1] = crop_data->crop_info[i].crop.top;
7725 crop[2] = crop_data->crop_info[i].crop.width;
7726 crop[3] = crop_data->crop_info[i].crop.height;
7727 }
7728 roi_map.add(crop_data->crop_info[i].roi_map.left);
7729 roi_map.add(crop_data->crop_info[i].roi_map.top);
7730 roi_map.add(crop_data->crop_info[i].roi_map.width);
7731 roi_map.add(crop_data->crop_info[i].roi_map.height);
7732 streams_found++;
7733 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7734 crop[0], crop[1], crop[2], crop[3]);
7735 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7736 crop_data->crop_info[i].roi_map.left,
7737 crop_data->crop_info[i].roi_map.top,
7738 crop_data->crop_info[i].roi_map.width,
7739 crop_data->crop_info[i].roi_map.height);
7740 break;
7741
7742 }
7743 }
7744 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7745 &streams_found, 1);
7746 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7747 crop, (size_t)(streams_found * 4));
7748 if (roi_map.array()) {
7749 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7750 roi_map.array(), roi_map.size());
7751 }
7752 }
7753 if (crop) {
7754 delete [] crop;
7755 }
7756 }
7757 }
7758 }
7759
7760 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
 7761         // Regardless of whether CAC is supported, CTS expects the CAC result to be non-NULL,
 7762         // so hardcode the CAC result to OFF mode.
7763 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7764 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7765 } else {
7766 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7767 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7768 *cacMode);
7769 if (NAME_NOT_FOUND != val) {
7770 uint8_t resultCacMode = (uint8_t)val;
7771 // check whether CAC result from CB is equal to Framework set CAC mode
7772 // If not equal then set the CAC mode came in corresponding request
7773 if (fwk_cacMode != resultCacMode) {
7774 resultCacMode = fwk_cacMode;
7775 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007776 //Check if CAC is disabled by property
7777 if (m_cacModeDisabled) {
7778 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7779 }
7780
Thierry Strudel3d639192016-09-09 11:52:26 -07007781 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7782 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7783 } else {
7784 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7785 }
7786 }
7787 }
7788
7789 // Post blob of cam_cds_data through vendor tag.
7790 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7791 uint8_t cnt = cdsInfo->num_of_streams;
7792 cam_cds_data_t cdsDataOverride;
7793 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7794 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7795 cdsDataOverride.num_of_streams = 1;
7796 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7797 uint32_t reproc_stream_id;
7798 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7799 LOGD("No reprocessible stream found, ignore cds data");
7800 } else {
7801 for (size_t i = 0; i < cnt; i++) {
7802 if (cdsInfo->cds_info[i].stream_id ==
7803 reproc_stream_id) {
7804 cdsDataOverride.cds_info[0].cds_enable =
7805 cdsInfo->cds_info[i].cds_enable;
7806 break;
7807 }
7808 }
7809 }
7810 } else {
7811 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7812 }
7813 camMetadata.update(QCAMERA3_CDS_INFO,
7814 (uint8_t *)&cdsDataOverride,
7815 sizeof(cam_cds_data_t));
7816 }
7817
7818 // Ldaf calibration data
7819 if (!mLdafCalibExist) {
7820 IF_META_AVAILABLE(uint32_t, ldafCalib,
7821 CAM_INTF_META_LDAF_EXIF, metadata) {
7822 mLdafCalibExist = true;
7823 mLdafCalib[0] = ldafCalib[0];
7824 mLdafCalib[1] = ldafCalib[1];
7825 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7826 ldafCalib[0], ldafCalib[1]);
7827 }
7828 }
7829
Thierry Strudel54dc9782017-02-15 12:12:10 -08007830 // EXIF debug data through vendor tag
7831 /*
7832 * Mobicat Mask can assume 3 values:
7833 * 1 refers to Mobicat data,
7834 * 2 refers to Stats Debug and Exif Debug Data
7835 * 3 refers to Mobicat and Stats Debug Data
7836 * We want to make sure that we are sending Exif debug data
7837 * only when Mobicat Mask is 2.
7838 */
7839 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
7840 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
7841 (uint8_t *)(void *)mExifParams.debug_params,
7842 sizeof(mm_jpeg_debug_exif_params_t));
7843 }
7844
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007845 // Reprocess and DDM debug data through vendor tag
7846 cam_reprocess_info_t repro_info;
7847 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007848 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7849 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007850 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007851 }
7852 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7853 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007854 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007855 }
7856 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7857 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007858 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007859 }
7860 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7861 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007862 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007863 }
7864 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7865 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007866 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007867 }
7868 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007869 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007870 }
7871 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7872 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007873 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007874 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007875 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7876 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7877 }
7878 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7879 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7880 }
7881 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7882 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007883
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007884 // INSTANT AEC MODE
7885 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7886 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7887 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7888 }
7889
Shuzhen Wange763e802016-03-31 10:24:29 -07007890 // AF scene change
7891 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7892 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7893 }
7894
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07007895 // Enable ZSL
7896 if (enableZsl != nullptr) {
7897 uint8_t value = *enableZsl ?
7898 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
7899 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
7900 }
7901
Thierry Strudel3d639192016-09-09 11:52:26 -07007902 resultMetadata = camMetadata.release();
7903 return resultMetadata;
7904}
7905
7906/*===========================================================================
7907 * FUNCTION : saveExifParams
7908 *
7909 * DESCRIPTION: cache the 3A/EXIF debug parameters from the metadata callback in mExifParams
7910 *
7911 * PARAMETERS :
7912 * @metadata : metadata information from callback
7913 *
7914 * RETURN : none
7915 *
7916 *==========================================================================*/
7917void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7918{
7919 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7920 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7921 if (mExifParams.debug_params) {
7922 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7923 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7924 }
7925 }
7926 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7927 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7928 if (mExifParams.debug_params) {
7929 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7930 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7931 }
7932 }
7933 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7934 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7935 if (mExifParams.debug_params) {
7936 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7937 mExifParams.debug_params->af_debug_params_valid = TRUE;
7938 }
7939 }
7940 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7941 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7942 if (mExifParams.debug_params) {
7943 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7944 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7945 }
7946 }
7947 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7948 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7949 if (mExifParams.debug_params) {
7950 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7951 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7952 }
7953 }
7954 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7955 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7956 if (mExifParams.debug_params) {
7957 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7958 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7959 }
7960 }
7961 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7962 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7963 if (mExifParams.debug_params) {
7964 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7965 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7966 }
7967 }
7968 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7969 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7970 if (mExifParams.debug_params) {
7971 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7972 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7973 }
7974 }
7975}
7976
7977/*===========================================================================
7978 * FUNCTION : get3AExifParams
7979 *
7980 * DESCRIPTION: return the cached 3A EXIF parameters (mExifParams)
7981 *
7982 * PARAMETERS : none
7983 *
7984 *
7985 * RETURN : mm_jpeg_exif_params_t
7986 *
7987 *==========================================================================*/
7988mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
7989{
7990 return mExifParams;
7991}
7992
7993/*===========================================================================
7994 * FUNCTION : translateCbUrgentMetadataToResultMetadata
7995 *
7996 * DESCRIPTION: translate urgent (partial 3A) metadata from the backend into framework result metadata
7997 *
7998 * PARAMETERS :
7999 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008000 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8001 * urgent metadata in a batch. Always true for
8002 * non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07008003 *
8004 * RETURN : camera_metadata_t*
8005 * metadata in a format specified by fwk
8006 *==========================================================================*/
8007camera_metadata_t*
8008QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008009 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07008010{
8011 CameraMetadata camMetadata;
8012 camera_metadata_t *resultMetadata;
8013
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008014 if (!lastUrgentMetadataInBatch) {
8015 /* In batch mode, use empty metadata if this is not the last in batch
8016 */
8017 resultMetadata = allocate_camera_metadata(0, 0);
8018 return resultMetadata;
8019 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008020
8021 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8022 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8023 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8024 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8025 }
8026
8027 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8028 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8029 &aecTrigger->trigger, 1);
8030 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8031 &aecTrigger->trigger_id, 1);
8032 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8033 aecTrigger->trigger);
8034 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8035 aecTrigger->trigger_id);
8036 }
8037
8038 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8039 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8040 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8041 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8042 }
8043
Thierry Strudel3d639192016-09-09 11:52:26 -07008044 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8045 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8046 &af_trigger->trigger, 1);
8047 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8048 af_trigger->trigger);
8049 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
8050 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8051 af_trigger->trigger_id);
8052 }
8053
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008054 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8055 /*af regions*/
8056 int32_t afRegions[REGIONS_TUPLE_COUNT];
8057        // Adjust AF region from the sensor output coordinate system to the
8058        // active array coordinate system.
8059 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
8060 hAfRegions->rect.width, hAfRegions->rect.height);
8061
8062 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
8063 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8064 REGIONS_TUPLE_COUNT);
8065 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8066 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
8067 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
8068 hAfRegions->rect.height);
8069 }
8070
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008071 // AF region confidence
8072 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8073 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8074 }
8075
Thierry Strudel3d639192016-09-09 11:52:26 -07008076 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8077 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8078 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8079 if (NAME_NOT_FOUND != val) {
8080 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8081 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8082 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8083 } else {
8084 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8085 }
8086 }
8087
8088 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8089 uint32_t aeMode = CAM_AE_MODE_MAX;
8090 int32_t flashMode = CAM_FLASH_MODE_MAX;
8091 int32_t redeye = -1;
8092 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8093 aeMode = *pAeMode;
8094 }
8095 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8096 flashMode = *pFlashMode;
8097 }
8098 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8099 redeye = *pRedeye;
8100 }
8101
8102 if (1 == redeye) {
8103 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8104 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8105 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8106 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8107 flashMode);
8108 if (NAME_NOT_FOUND != val) {
8109 fwk_aeMode = (uint8_t)val;
8110 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8111 } else {
8112 LOGE("Unsupported flash mode %d", flashMode);
8113 }
8114 } else if (aeMode == CAM_AE_MODE_ON) {
8115 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8116 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8117 } else if (aeMode == CAM_AE_MODE_OFF) {
8118 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8119 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008120 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8121 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8122 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008123 } else {
8124 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8125 "flashMode:%d, aeMode:%u!!!",
8126 redeye, flashMode, aeMode);
8127 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008128 if (mInstantAEC) {
8129        // Increment frame index count until a bound is reached for instant AEC.
8130 mInstantAecFrameIdxCount++;
8131 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8132 CAM_INTF_META_AEC_INFO, metadata) {
8133 LOGH("ae_params->settled = %d",ae_params->settled);
8134            // If AEC has settled, or the number of frames has reached the bound value,
8135            // reset instant AEC.
8136 if (ae_params->settled ||
8137 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8138 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8139 mInstantAEC = false;
8140 mResetInstantAEC = true;
8141 mInstantAecFrameIdxCount = 0;
8142 }
8143 }
8144 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008145 resultMetadata = camMetadata.release();
8146 return resultMetadata;
8147}
8148
8149/*===========================================================================
8150 * FUNCTION : dumpMetadataToFile
8151 *
8152 * DESCRIPTION: Dumps tuning metadata to file system
8153 *
8154 * PARAMETERS :
8155 * @meta : tuning metadata
8156 * @dumpFrameCount : current dump frame count
8157 * @enabled : Enable mask
8158 *
8159 *==========================================================================*/
8160void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8161 uint32_t &dumpFrameCount,
8162 bool enabled,
8163 const char *type,
8164 uint32_t frameNumber)
8165{
8166 //Some sanity checks
8167 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8168 LOGE("Tuning sensor data size bigger than expected %d: %d",
8169 meta.tuning_sensor_data_size,
8170 TUNING_SENSOR_DATA_MAX);
8171 return;
8172 }
8173
8174 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8175 LOGE("Tuning VFE data size bigger than expected %d: %d",
8176 meta.tuning_vfe_data_size,
8177 TUNING_VFE_DATA_MAX);
8178 return;
8179 }
8180
8181 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8182 LOGE("Tuning CPP data size bigger than expected %d: %d",
8183 meta.tuning_cpp_data_size,
8184 TUNING_CPP_DATA_MAX);
8185 return;
8186 }
8187
8188 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8189 LOGE("Tuning CAC data size bigger than expected %d: %d",
8190 meta.tuning_cac_data_size,
8191 TUNING_CAC_DATA_MAX);
8192 return;
8193 }
8194 //
8195
8196 if(enabled){
8197 char timeBuf[FILENAME_MAX];
8198 char buf[FILENAME_MAX];
8199 memset(buf, 0, sizeof(buf));
8200 memset(timeBuf, 0, sizeof(timeBuf));
8201 time_t current_time;
8202 struct tm * timeinfo;
8203 time (&current_time);
8204 timeinfo = localtime (&current_time);
8205 if (timeinfo != NULL) {
8206 strftime (timeBuf, sizeof(timeBuf),
8207 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8208 }
8209 String8 filePath(timeBuf);
8210 snprintf(buf,
8211 sizeof(buf),
8212 "%dm_%s_%d.bin",
8213 dumpFrameCount,
8214 type,
8215 frameNumber);
8216 filePath.append(buf);
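        // Illustrative note (not from the original code): the resulting dump path is
        // QCAMERA_DUMP_FRM_LOCATION + "%Y%m%d%H%M%S" + "<count>m_<type>_<frame>.bin".
        // For a hypothetical dump location of "/data/misc/camera/", a dump made on
        // 2017-04-15 12:00:00 with dumpFrameCount 3, type "meta" and frame 42 would be
        //     /data/misc/camera/201704151200003m_meta_42.bin
        // (note there is no separator between the timestamp and the counter).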
8217 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8218 if (file_fd >= 0) {
8219 ssize_t written_len = 0;
8220 meta.tuning_data_version = TUNING_DATA_VERSION;
8221 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8222 written_len += write(file_fd, data, sizeof(uint32_t));
8223 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8224 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8225 written_len += write(file_fd, data, sizeof(uint32_t));
8226 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8227 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8228 written_len += write(file_fd, data, sizeof(uint32_t));
8229 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8230 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8231 written_len += write(file_fd, data, sizeof(uint32_t));
8232 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8233 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8234 written_len += write(file_fd, data, sizeof(uint32_t));
8235 meta.tuning_mod3_data_size = 0;
8236 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8237 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8238 written_len += write(file_fd, data, sizeof(uint32_t));
8239 size_t total_size = meta.tuning_sensor_data_size;
8240 data = (void *)((uint8_t *)&meta.data);
8241 written_len += write(file_fd, data, total_size);
8242 total_size = meta.tuning_vfe_data_size;
8243 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8244 written_len += write(file_fd, data, total_size);
8245 total_size = meta.tuning_cpp_data_size;
8246 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8247 written_len += write(file_fd, data, total_size);
8248 total_size = meta.tuning_cac_data_size;
8249 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8250 written_len += write(file_fd, data, total_size);
8251 close(file_fd);
8252        } else {
8253            LOGE("failed to open file for metadata dumping");
8254 }
8255 }
8256}
8257
8258/*===========================================================================
8259 * FUNCTION : cleanAndSortStreamInfo
8260 *
8261 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8262 * and sort them such that raw streams are at the end of the list.
8263 * This is a workaround for a camera daemon constraint.
8264 *
8265 * PARAMETERS : None
8266 *
8267 *==========================================================================*/
8268void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8269{
8270 List<stream_info_t *> newStreamInfo;
8271
8272 /*clean up invalid streams*/
8273 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8274 it != mStreamInfo.end();) {
8275 if(((*it)->status) == INVALID){
8276 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8277 delete channel;
8278 free(*it);
8279 it = mStreamInfo.erase(it);
8280 } else {
8281 it++;
8282 }
8283 }
8284
8285 // Move preview/video/callback/snapshot streams into newList
8286 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8287 it != mStreamInfo.end();) {
8288 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8289 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8290 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8291 newStreamInfo.push_back(*it);
8292 it = mStreamInfo.erase(it);
8293 } else
8294 it++;
8295 }
8296 // Move raw streams into newList
8297 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8298 it != mStreamInfo.end();) {
8299 newStreamInfo.push_back(*it);
8300 it = mStreamInfo.erase(it);
8301 }
8302
8303 mStreamInfo = newStreamInfo;
8304}
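/*
 * Illustrative example (not part of the original code): for a configured stream
 * set such as { RAW16, preview (IMPLEMENTATION_DEFINED), JPEG snapshot }, the two
 * passes above yield { preview, JPEG snapshot, RAW16 }: non-RAW streams keep
 * their relative order and all RAW streams end up at the tail of mStreamInfo,
 * which is what the camera daemon requires.
 */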
8305
8306/*===========================================================================
8307 * FUNCTION : extractJpegMetadata
8308 *
8309 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8310 * JPEG metadata is cached in the HAL and returned as part of the capture
8311 * result when metadata is received from the camera daemon.
8312 *
8313 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8314 * @request: capture request
8315 *
8316 *==========================================================================*/
8317void QCamera3HardwareInterface::extractJpegMetadata(
8318 CameraMetadata& jpegMetadata,
8319 const camera3_capture_request_t *request)
8320{
8321 CameraMetadata frame_settings;
8322 frame_settings = request->settings;
8323
8324 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8325 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8326 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8327 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8328
8329 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8330 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8331 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8332 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8333
8334 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8335 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8336 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8337 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8338
8339 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8340 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8341 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8342 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8343
8344 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8345 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8346 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8347 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8348
8349 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8350 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8351 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8352 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8353
8354 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8355 int32_t thumbnail_size[2];
8356 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8357 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8358 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8359 int32_t orientation =
8360 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008361 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008362 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8363 int32_t temp;
8364 temp = thumbnail_size[0];
8365 thumbnail_size[0] = thumbnail_size[1];
8366 thumbnail_size[1] = temp;
8367 }
8368 }
8369 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8370 thumbnail_size,
8371 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8372 }
8373
8374}
8375
8376/*===========================================================================
8377 * FUNCTION : convertToRegions
8378 *
8379 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8380 *
8381 * PARAMETERS :
8382 * @rect : cam_rect_t struct to convert
8383 * @region : int32_t destination array
8384 * @weight : if we are converting from cam_area_t, weight is valid
8385 * else weight = -1
8386 *
8387 *==========================================================================*/
8388void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8389 int32_t *region, int weight)
8390{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008391 region[FACE_LEFT] = rect.left;
8392 region[FACE_TOP] = rect.top;
8393 region[FACE_RIGHT] = rect.left + rect.width;
8394 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008395 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008396 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008397 }
8398}
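/*
 * Worked example (illustrative only): for rect = {left = 100, top = 200,
 * width = 50, height = 40} and weight = 1, the array is filled as
 * {100, 200, 150, 240, 1}, i.e. [x_min, y_min, x_max, y_max, weight],
 * assuming the FACE_LEFT..FACE_WEIGHT indices map to positions 0..4 as the
 * framework region tags (e.g. ANDROID_CONTROL_AF_REGIONS) expect.
 */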
8399
8400/*===========================================================================
8401 * FUNCTION : convertFromRegions
8402 *
8403 * DESCRIPTION: helper method to convert a framework region array
8404 *              ([x_min, y_min, x_max, y_max, weight]) into cam_area_t
8405 *
8406 * PARAMETERS :
8407 * @roi : cam_area_t destination struct to fill
8408 * @frame_settings : framework metadata containing the region array
8409 * @tag : metadata tag identifying the region entry to convert
8410 *
8411 *==========================================================================*/
8412void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008413 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008414{
Thierry Strudel3d639192016-09-09 11:52:26 -07008415 int32_t x_min = frame_settings.find(tag).data.i32[0];
8416 int32_t y_min = frame_settings.find(tag).data.i32[1];
8417 int32_t x_max = frame_settings.find(tag).data.i32[2];
8418 int32_t y_max = frame_settings.find(tag).data.i32[3];
8419 roi.weight = frame_settings.find(tag).data.i32[4];
8420 roi.rect.left = x_min;
8421 roi.rect.top = y_min;
8422 roi.rect.width = x_max - x_min;
8423 roi.rect.height = y_max - y_min;
8424}
8425
8426/*===========================================================================
8427 * FUNCTION : resetIfNeededROI
8428 *
8429 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8430 * crop region
8431 *
8432 * PARAMETERS :
8433 * @roi : cam_area_t struct to resize
8434 * @scalerCropRegion : cam_crop_region_t region to compare against
8435 *
8436 *
8437 *==========================================================================*/
8438bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8439 const cam_crop_region_t* scalerCropRegion)
8440{
8441 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8442 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8443 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8444 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8445
8446    /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8447     * Without this check, the validation below (which verifies that the ROI lies
8448     * inside the scaler crop region) would fail, the ROI would not be reset, and
8449     * the algorithm would keep using a stale ROI window.
8450     */
8451 if (roi->weight == 0) {
8452 return true;
8453 }
8454
8455 if ((roi_x_max < scalerCropRegion->left) ||
8456            // right edge of roi window is left of scaler crop's left edge
8457 (roi_y_max < scalerCropRegion->top) ||
8458            // bottom edge of roi window is above scaler crop's top edge
8459 (roi->rect.left > crop_x_max) ||
8460            // left edge of roi window is beyond (right of) scaler crop's right edge
8461 (roi->rect.top > crop_y_max)){
8462            // top edge of roi window is below scaler crop's bottom edge
8463 return false;
8464 }
8465 if (roi->rect.left < scalerCropRegion->left) {
8466 roi->rect.left = scalerCropRegion->left;
8467 }
8468 if (roi->rect.top < scalerCropRegion->top) {
8469 roi->rect.top = scalerCropRegion->top;
8470 }
8471 if (roi_x_max > crop_x_max) {
8472 roi_x_max = crop_x_max;
8473 }
8474 if (roi_y_max > crop_y_max) {
8475 roi_y_max = crop_y_max;
8476 }
8477 roi->rect.width = roi_x_max - roi->rect.left;
8478 roi->rect.height = roi_y_max - roi->rect.top;
8479 return true;
8480}
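/*
 * Worked example (illustrative only): with scalerCropRegion = {left = 0, top = 0,
 * width = 1920, height = 1080} and roi = {weight = 1, rect = {left = 1800,
 * top = 900, width = 300, height = 300}}, the ROI overlaps the crop region and is
 * clamped to {left = 1800, top = 900, width = 120, height = 180}; the function
 * returns true. A weight of 0 returns true immediately (ROI disabled), and an ROI
 * lying entirely outside the crop region returns false without being clamped.
 */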
8481
8482/*===========================================================================
8483 * FUNCTION : convertLandmarks
8484 *
8485 * DESCRIPTION: helper method to extract the landmarks from face detection info
8486 *
8487 * PARAMETERS :
8488 * @landmark_data : input landmark data to be converted
8489 * @landmarks : int32_t destination array
8490 *
8491 *
8492 *==========================================================================*/
8493void QCamera3HardwareInterface::convertLandmarks(
8494 cam_face_landmarks_info_t landmark_data,
8495 int32_t *landmarks)
8496{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008497 if (landmark_data.is_left_eye_valid) {
8498 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8499 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8500 } else {
8501 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8502 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8503 }
8504
8505 if (landmark_data.is_right_eye_valid) {
8506 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8507 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8508 } else {
8509 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8510 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8511 }
8512
8513 if (landmark_data.is_mouth_valid) {
8514 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8515 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8516 } else {
8517 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8518 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8519 }
8520}
8521
8522/*===========================================================================
8523 * FUNCTION : setInvalidLandmarks
8524 *
8525 * DESCRIPTION: helper method to set invalid landmarks
8526 *
8527 * PARAMETERS :
8528 * @landmarks : int32_t destination array
8529 *
8530 *
8531 *==========================================================================*/
8532void QCamera3HardwareInterface::setInvalidLandmarks(
8533 int32_t *landmarks)
8534{
8535 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8536 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8537 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8538 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8539 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8540 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008541}
8542
8543#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008544
8545/*===========================================================================
8546 * FUNCTION : getCapabilities
8547 *
8548 * DESCRIPTION: query camera capability from back-end
8549 *
8550 * PARAMETERS :
8551 * @ops : mm-interface ops structure
8552 * @cam_handle : camera handle for which we need capability
8553 *
8554 * RETURN : ptr type of capability structure
8555 * capability for success
8556 * NULL for failure
8557 *==========================================================================*/
8558cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8559 uint32_t cam_handle)
8560{
8561 int rc = NO_ERROR;
8562 QCamera3HeapMemory *capabilityHeap = NULL;
8563 cam_capability_t *cap_ptr = NULL;
8564
8565 if (ops == NULL) {
8566 LOGE("Invalid arguments");
8567 return NULL;
8568 }
8569
8570 capabilityHeap = new QCamera3HeapMemory(1);
8571 if (capabilityHeap == NULL) {
8572 LOGE("creation of capabilityHeap failed");
8573 return NULL;
8574 }
8575
8576 /* Allocate memory for capability buffer */
8577 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8578 if(rc != OK) {
8579        LOGE("No memory for capability");
8580 goto allocate_failed;
8581 }
8582
8583 /* Map memory for capability buffer */
8584 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8585
8586 rc = ops->map_buf(cam_handle,
8587 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8588 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8589 if(rc < 0) {
8590 LOGE("failed to map capability buffer");
8591 rc = FAILED_TRANSACTION;
8592 goto map_failed;
8593 }
8594
8595 /* Query Capability */
8596 rc = ops->query_capability(cam_handle);
8597 if(rc < 0) {
8598 LOGE("failed to query capability");
8599 rc = FAILED_TRANSACTION;
8600 goto query_failed;
8601 }
8602
8603 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8604 if (cap_ptr == NULL) {
8605 LOGE("out of memory");
8606 rc = NO_MEMORY;
8607 goto query_failed;
8608 }
8609
8610 memset(cap_ptr, 0, sizeof(cam_capability_t));
8611 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8612
8613 int index;
8614 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8615 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8616 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8617 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8618 }
8619
8620query_failed:
8621 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8622map_failed:
8623 capabilityHeap->deallocate();
8624allocate_failed:
8625 delete capabilityHeap;
8626
8627 if (rc != NO_ERROR) {
8628 return NULL;
8629 } else {
8630 return cap_ptr;
8631 }
8632}
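/*
 * Usage note (descriptive, not from the original code): the returned
 * cam_capability_t is allocated here with malloc() and filled from the mapped
 * capability heap, so ownership passes to the caller, which must free() it.
 * initCapabilities() below, for example, stores it in gCamCapability[] and
 * frees it on its dual-camera error path.
 */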
8633
Thierry Strudel3d639192016-09-09 11:52:26 -07008634/*===========================================================================
8635 * FUNCTION : initCapabilities
8636 *
8637 * DESCRIPTION: initialize camera capabilities in static data struct
8638 *
8639 * PARAMETERS :
8640 * @cameraId : camera Id
8641 *
8642 * RETURN : int32_t type of status
8643 * NO_ERROR -- success
8644 * none-zero failure code
8645 * non-zero failure code
8646int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8647{
8648 int rc = 0;
8649 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008650 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008651
8652 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8653 if (rc) {
8654 LOGE("camera_open failed. rc = %d", rc);
8655 goto open_failed;
8656 }
8657 if (!cameraHandle) {
8658 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8659 goto open_failed;
8660 }
8661
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008662 handle = get_main_camera_handle(cameraHandle->camera_handle);
8663 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8664 if (gCamCapability[cameraId] == NULL) {
8665 rc = FAILED_TRANSACTION;
8666 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008667 }
8668
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008669 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008670 if (is_dual_camera_by_idx(cameraId)) {
8671 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8672 gCamCapability[cameraId]->aux_cam_cap =
8673 getCapabilities(cameraHandle->ops, handle);
8674 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8675 rc = FAILED_TRANSACTION;
8676 free(gCamCapability[cameraId]);
8677 goto failed_op;
8678 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008679
8680 // Copy the main camera capability to main_cam_cap struct
8681 gCamCapability[cameraId]->main_cam_cap =
8682 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8683 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8684 LOGE("out of memory");
8685 rc = NO_MEMORY;
8686 goto failed_op;
8687 }
8688 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8689 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008690 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008691failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07008692 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
8693 cameraHandle = NULL;
8694open_failed:
8695 return rc;
8696}
8697
8698/*==========================================================================
8699 * FUNCTION : get3AVersion
8700 *
8701 * DESCRIPTION: get the Q3A S/W version
8702 *
8703 * PARAMETERS :
8704 * @sw_version: Reference of Q3A structure which will hold version info upon
8705 * return
8706 *
8707 * RETURN : None
8708 *
8709 *==========================================================================*/
8710void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
8711{
8712 if(gCamCapability[mCameraId])
8713 sw_version = gCamCapability[mCameraId]->q3a_version;
8714 else
8715 LOGE("Capability structure NULL!");
8716}
8717
8718
8719/*===========================================================================
8720 * FUNCTION : initParameters
8721 *
8722 * DESCRIPTION: initialize camera parameters
8723 *
8724 * PARAMETERS :
8725 *
8726 * RETURN : int32_t type of status
8727 * NO_ERROR -- success
8728 * non-zero failure code
8729 *==========================================================================*/
8730int QCamera3HardwareInterface::initParameters()
8731{
8732 int rc = 0;
8733
8734 //Allocate Set Param Buffer
8735 mParamHeap = new QCamera3HeapMemory(1);
8736 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8737 if(rc != OK) {
8738 rc = NO_MEMORY;
8739 LOGE("Failed to allocate SETPARM Heap memory");
8740 delete mParamHeap;
8741 mParamHeap = NULL;
8742 return rc;
8743 }
8744
8745 //Map memory for parameters buffer
8746 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8747 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8748 mParamHeap->getFd(0),
8749 sizeof(metadata_buffer_t),
8750 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8751 if(rc < 0) {
8752 LOGE("failed to map SETPARM buffer");
8753 rc = FAILED_TRANSACTION;
8754 mParamHeap->deallocate();
8755 delete mParamHeap;
8756 mParamHeap = NULL;
8757 return rc;
8758 }
8759
8760 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8761
8762 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8763 return rc;
8764}
8765
8766/*===========================================================================
8767 * FUNCTION : deinitParameters
8768 *
8769 * DESCRIPTION: de-initialize camera parameters
8770 *
8771 * PARAMETERS :
8772 *
8773 * RETURN : NONE
8774 *==========================================================================*/
8775void QCamera3HardwareInterface::deinitParameters()
8776{
8777 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8778 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8779
8780 mParamHeap->deallocate();
8781 delete mParamHeap;
8782 mParamHeap = NULL;
8783
8784 mParameters = NULL;
8785
8786 free(mPrevParameters);
8787 mPrevParameters = NULL;
8788}
8789
8790/*===========================================================================
8791 * FUNCTION : calcMaxJpegSize
8792 *
8793 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8794 *
8795 * PARAMETERS :
8796 * @camera_id : camera Id
8797 * RETURN : max_jpeg_size
8798 *==========================================================================*/
8799size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8800{
8801 size_t max_jpeg_size = 0;
8802 size_t temp_width, temp_height;
8803 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8804 MAX_SIZES_CNT);
8805 for (size_t i = 0; i < count; i++) {
8806 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8807 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8808 if (temp_width * temp_height > max_jpeg_size ) {
8809 max_jpeg_size = temp_width * temp_height;
8810 }
8811 }
8812 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8813 return max_jpeg_size;
8814}
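/*
 * Worked example (illustrative, assuming a hypothetical 4032x3024 maximum
 * picture size): max_jpeg_size = 4032 * 3024 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 * = 18289152 bytes plus the blob header; the 3/2 factor corresponds to a
 * worst-case YUV420-sized buffer for the largest supported picture size.
 */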
8815
8816/*===========================================================================
8817 * FUNCTION : getMaxRawSize
8818 *
8819 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8820 *
8821 * PARAMETERS :
8822 * @camera_id : camera Id
8823 * RETURN : Largest supported Raw Dimension
8824 *==========================================================================*/
8825cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8826{
8827 int max_width = 0;
8828 cam_dimension_t maxRawSize;
8829
8830 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8831 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8832 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8833 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8834 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8835 }
8836 }
8837 return maxRawSize;
8838}
8839
8840
8841/*===========================================================================
8842 * FUNCTION : calcMaxJpegDim
8843 *
8844 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8845 *
8846 * PARAMETERS :
8847 *
8848 * RETURN : max_jpeg_dim
8849 *==========================================================================*/
8850cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8851{
8852 cam_dimension_t max_jpeg_dim;
8853 cam_dimension_t curr_jpeg_dim;
8854 max_jpeg_dim.width = 0;
8855 max_jpeg_dim.height = 0;
8856 curr_jpeg_dim.width = 0;
8857 curr_jpeg_dim.height = 0;
8858 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8859 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8860 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8861 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8862 max_jpeg_dim.width * max_jpeg_dim.height ) {
8863 max_jpeg_dim.width = curr_jpeg_dim.width;
8864 max_jpeg_dim.height = curr_jpeg_dim.height;
8865 }
8866 }
8867 return max_jpeg_dim;
8868}
8869
8870/*===========================================================================
8871 * FUNCTION : addStreamConfig
8872 *
8873 * DESCRIPTION: adds the stream configuration to the array
8874 *
8875 * PARAMETERS :
8876 * @available_stream_configs : pointer to stream configuration array
8877 * @scalar_format : scalar format
8878 * @dim : configuration dimension
8879 * @config_type : input or output configuration type
8880 *
8881 * RETURN : NONE
8882 *==========================================================================*/
8883void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8884 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8885{
8886 available_stream_configs.add(scalar_format);
8887 available_stream_configs.add(dim.width);
8888 available_stream_configs.add(dim.height);
8889 available_stream_configs.add(config_type);
8890}
8891
8892/*===========================================================================
8893 * FUNCTION : supportBurstCapture
8894 *
8895 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8896 *
8897 * PARAMETERS :
8898 * @cameraId : camera Id
8899 *
8900 * RETURN : true if camera supports BURST_CAPTURE
8901 * false otherwise
8902 *==========================================================================*/
8903bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8904{
8905 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8906 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8907 const int32_t highResWidth = 3264;
8908 const int32_t highResHeight = 2448;
8909
8910 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8911 // Maximum resolution images cannot be captured at >= 10fps
8912 // -> not supporting BURST_CAPTURE
8913 return false;
8914 }
8915
8916 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8917 // Maximum resolution images can be captured at >= 20fps
8918 // --> supporting BURST_CAPTURE
8919 return true;
8920 }
8921
8922 // Find the smallest highRes resolution, or largest resolution if there is none
8923 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8924 MAX_SIZES_CNT);
8925 size_t highRes = 0;
8926 while ((highRes + 1 < totalCnt) &&
8927 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8928 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8929 highResWidth * highResHeight)) {
8930 highRes++;
8931 }
8932 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8933 return true;
8934 } else {
8935 return false;
8936 }
8937}
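/*
 * Worked example (illustrative numbers): if the minimum frame duration of the
 * largest picture size is 80 ms (12.5 fps), the 100 ms full-resolution bound
 * passes but the 50 ms high-resolution bound does not, so the loop above scans
 * the picture size table for the smallest entry that is still at least
 * 3264x2448 (about 8 MP); BURST_CAPTURE is advertised only if that entry can be
 * captured within 50 ms, i.e. at 20 fps or faster.
 */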
8938
8939/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00008940 * FUNCTION : getPDStatIndex
8941 *
8942 * DESCRIPTION: Return the meta raw phase detection statistics index if present
8943 *
8944 * PARAMETERS :
8945 * @caps : camera capabilities
8946 *
8947 * RETURN : int32_t type
8948 * non-negative - on success
8949 * -1 - on failure
8950 *==========================================================================*/
8951int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
8952 if (nullptr == caps) {
8953 return -1;
8954 }
8955
8956 uint32_t metaRawCount = caps->meta_raw_channel_count;
8957 int32_t ret = -1;
8958 for (size_t i = 0; i < metaRawCount; i++) {
8959 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
8960 ret = i;
8961 break;
8962 }
8963 }
8964
8965 return ret;
8966}
8967
8968/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07008969 * FUNCTION : initStaticMetadata
8970 *
8971 * DESCRIPTION: initialize the static metadata
8972 *
8973 * PARAMETERS :
8974 * @cameraId : camera Id
8975 *
8976 * RETURN : int32_t type of status
8977 * 0 -- success
8978 * non-zero failure code
8979 *==========================================================================*/
8980int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8981{
8982 int rc = 0;
8983 CameraMetadata staticInfo;
8984 size_t count = 0;
8985 bool limitedDevice = false;
8986 char prop[PROPERTY_VALUE_MAX];
8987 bool supportBurst = false;
8988
8989 supportBurst = supportBurstCapture(cameraId);
8990
8991 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8992     * guaranteed, or if the min fps at max resolution is less than 20 fps, it is
8993     * advertised as a limited device */
8994 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8995 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8996 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8997 !supportBurst;
8998
8999 uint8_t supportedHwLvl = limitedDevice ?
9000 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009001#ifndef USE_HAL_3_3
9002 // LEVEL_3 - This device will support level 3.
9003 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9004#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009005 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009006#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009007
9008 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9009 &supportedHwLvl, 1);
9010
9011 bool facingBack = false;
9012 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9013 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9014 facingBack = true;
9015 }
9016 /*HAL 3 only*/
9017 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9018 &gCamCapability[cameraId]->min_focus_distance, 1);
9019
9020 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9021 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9022
9023 /*should be using focal lengths but sensor doesn't provide that info now*/
9024 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9025 &gCamCapability[cameraId]->focal_length,
9026 1);
9027
9028 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9029 gCamCapability[cameraId]->apertures,
9030 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9031
9032 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9033 gCamCapability[cameraId]->filter_densities,
9034 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9035
9036
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009037 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9038 size_t mode_count =
9039 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9040 for (size_t i = 0; i < mode_count; i++) {
9041 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9042 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009043 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009044 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009045
9046 int32_t lens_shading_map_size[] = {
9047 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9048 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9049 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9050 lens_shading_map_size,
9051 sizeof(lens_shading_map_size)/sizeof(int32_t));
9052
9053 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9054 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9055
9056 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9057 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9058
9059 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9060 &gCamCapability[cameraId]->max_frame_duration, 1);
9061
9062 camera_metadata_rational baseGainFactor = {
9063 gCamCapability[cameraId]->base_gain_factor.numerator,
9064 gCamCapability[cameraId]->base_gain_factor.denominator};
9065 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9066 &baseGainFactor, 1);
9067
9068 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9069 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9070
9071 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9072 gCamCapability[cameraId]->pixel_array_size.height};
9073 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9074 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9075
9076 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9077 gCamCapability[cameraId]->active_array_size.top,
9078 gCamCapability[cameraId]->active_array_size.width,
9079 gCamCapability[cameraId]->active_array_size.height};
9080 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9081 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9082
9083 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9084 &gCamCapability[cameraId]->white_level, 1);
9085
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009086 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9087 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9088 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009089 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009090 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009091
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009092#ifndef USE_HAL_3_3
9093 bool hasBlackRegions = false;
9094 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9095 LOGW("black_region_count: %d is bounded to %d",
9096 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9097 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9098 }
9099 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9100 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9101 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9102 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9103 }
9104 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9105 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9106 hasBlackRegions = true;
9107 }
9108#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009109 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9110 &gCamCapability[cameraId]->flash_charge_duration, 1);
9111
9112 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9113 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9114
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009115 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9116 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9117 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009118 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9119 &timestampSource, 1);
9120
Thierry Strudel54dc9782017-02-15 12:12:10 -08009121 //update histogram vendor data
9122 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009123 &gCamCapability[cameraId]->histogram_size, 1);
9124
Thierry Strudel54dc9782017-02-15 12:12:10 -08009125 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009126 &gCamCapability[cameraId]->max_histogram_count, 1);
9127
Shuzhen Wang14415f52016-11-16 18:26:18 -08009128 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9129    // so that the app can request fewer bins than the maximum supported.
9130 std::vector<int32_t> histBins;
9131 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9132 histBins.push_back(maxHistBins);
9133 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9134 (maxHistBins & 0x1) == 0) {
9135 histBins.push_back(maxHistBins >> 1);
9136 maxHistBins >>= 1;
9137 }
9138 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9139 histBins.data(), histBins.size());
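    // Worked example (illustrative): if max_histogram_count is 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE is, say, 32, the loop above advertises the
    // supported bin counts {256, 128, 64, 32}; halving stops once the next value
    // would drop below the minimum or the current count becomes odd.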
9140
Thierry Strudel3d639192016-09-09 11:52:26 -07009141 int32_t sharpness_map_size[] = {
9142 gCamCapability[cameraId]->sharpness_map_size.width,
9143 gCamCapability[cameraId]->sharpness_map_size.height};
9144
9145 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9146 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9147
9148 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9149 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9150
Emilian Peev0f3c3162017-03-15 12:57:46 +00009151 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9152 if (0 <= indexPD) {
9153 // Advertise PD stats data as part of the Depth capabilities
9154 int32_t depthWidth =
9155 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9156 int32_t depthHeight =
9157 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
9158 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9159 assert(0 < depthSamplesCount);
9160 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9161 &depthSamplesCount, 1);
9162
9163 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9164 depthHeight,
9165 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9166 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9167 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9168 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9169 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9170
9171 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9172 depthHeight, 33333333,
9173 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9174 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9175 depthMinDuration,
9176 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9177
9178 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9179 depthHeight, 0,
9180 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9181 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9182 depthStallDuration,
9183 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9184
9185 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9186 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
9187 }
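    // Worked example (illustrative, hypothetical PD stats dimensions): for a
    // raw_meta_dim of 1440x320, depthSamplesCount = (1440 * 320 * 2) / 16 = 57600,
    // which is what gets advertised above as ANDROID_DEPTH_MAX_DEPTH_SAMPLES and
    // as the width of the BLOB entry in the depth stream configurations.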
9188
Thierry Strudel3d639192016-09-09 11:52:26 -07009189 int32_t scalar_formats[] = {
9190 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9191 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9192 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9193 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9194 HAL_PIXEL_FORMAT_RAW10,
9195 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009196 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9197 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9198 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009199
9200 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9201 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9202 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9203 count, MAX_SIZES_CNT, available_processed_sizes);
9204 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9205 available_processed_sizes, count * 2);
9206
9207 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9208 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9209 makeTable(gCamCapability[cameraId]->raw_dim,
9210 count, MAX_SIZES_CNT, available_raw_sizes);
9211 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9212 available_raw_sizes, count * 2);
9213
9214 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9215 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9216 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9217 count, MAX_SIZES_CNT, available_fps_ranges);
9218 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9219 available_fps_ranges, count * 2);
9220
9221 camera_metadata_rational exposureCompensationStep = {
9222 gCamCapability[cameraId]->exp_compensation_step.numerator,
9223 gCamCapability[cameraId]->exp_compensation_step.denominator};
9224 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9225 &exposureCompensationStep, 1);
9226
9227 Vector<uint8_t> availableVstabModes;
9228 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9229 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009230 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009231 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009232 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009233 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009234 count = IS_TYPE_MAX;
9235 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9236 for (size_t i = 0; i < count; i++) {
9237 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9238 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9239 eisSupported = true;
9240 break;
9241 }
9242 }
9243 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009244 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9245 }
9246 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9247 availableVstabModes.array(), availableVstabModes.size());
9248
9249 /*HAL 1 and HAL 3 common*/
9250 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9251 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9252 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009253 // Cap the max zoom to the max preferred value
9254 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009255 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9256 &maxZoom, 1);
9257
9258 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9259 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9260
9261 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9262 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9263 max3aRegions[2] = 0; /* AF not supported */
9264 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9265 max3aRegions, 3);
9266
9267 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9268 memset(prop, 0, sizeof(prop));
9269 property_get("persist.camera.facedetect", prop, "1");
9270 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9271 LOGD("Support face detection mode: %d",
9272 supportedFaceDetectMode);
9273
9274 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009275 /* supported mode should be OFF if the max number of faces is 0 */
9276 if (maxFaces <= 0) {
9277 supportedFaceDetectMode = 0;
9278 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009279 Vector<uint8_t> availableFaceDetectModes;
9280 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9281 if (supportedFaceDetectMode == 1) {
9282 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9283 } else if (supportedFaceDetectMode == 2) {
9284 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9285 } else if (supportedFaceDetectMode == 3) {
9286 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9287 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9288 } else {
9289 maxFaces = 0;
9290 }
9291 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9292 availableFaceDetectModes.array(),
9293 availableFaceDetectModes.size());
9294 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9295 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009296 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9297 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9298 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009299
9300 int32_t exposureCompensationRange[] = {
9301 gCamCapability[cameraId]->exposure_compensation_min,
9302 gCamCapability[cameraId]->exposure_compensation_max};
9303 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9304 exposureCompensationRange,
9305 sizeof(exposureCompensationRange)/sizeof(int32_t));
9306
9307 uint8_t lensFacing = (facingBack) ?
9308 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9309 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9310
9311 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9312 available_thumbnail_sizes,
9313 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9314
9315 /* all supported picture sizes are combined into this tag */
9316 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9317 /*android.scaler.availableStreamConfigurations*/
9318 Vector<int32_t> available_stream_configs;
9319 cam_dimension_t active_array_dim;
9320 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9321 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009322
9323 /* Advertise the list of supported input dimensions based on the property below.
9324 By default all sizes up to 5MP will be advertised.
9325 Note that the setprop resolution format should be WxH,
9326 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9327 To list all supported sizes, the setprop needs to be set to "0x0" */
9328 cam_dimension_t minInputSize = {2592,1944}; //5MP
9329 memset(prop, 0, sizeof(prop));
9330 property_get("persist.camera.input.minsize", prop, "2592x1944");
9331 if (strlen(prop) > 0) {
9332 char *saveptr = NULL;
9333 char *token = strtok_r(prop, "x", &saveptr);
9334 if (token != NULL) {
9335 minInputSize.width = atoi(token);
9336 }
9337 token = strtok_r(NULL, "x", &saveptr);
9338 if (token != NULL) {
9339 minInputSize.height = atoi(token);
9340 }
9341 }
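    // Illustrative example (sizes not taken from the capability table): with the
    // default "2592x1944", a 3264x2448 picture size qualifies as a reprocess
    // input below, while 1280x720 does not unless the property is relaxed,
    // e.g. set to "1280x720" or to "0x0" to allow every supported size.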
9342
Thierry Strudel3d639192016-09-09 11:52:26 -07009343 /* Add input/output stream configurations for each scalar format */
9344 for (size_t j = 0; j < scalar_formats_count; j++) {
9345 switch (scalar_formats[j]) {
9346 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9347 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9348 case HAL_PIXEL_FORMAT_RAW10:
9349 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9350 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9351 addStreamConfig(available_stream_configs, scalar_formats[j],
9352 gCamCapability[cameraId]->raw_dim[i],
9353 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9354 }
9355 break;
9356 case HAL_PIXEL_FORMAT_BLOB:
9357 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9358 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9359 addStreamConfig(available_stream_configs, scalar_formats[j],
9360 gCamCapability[cameraId]->picture_sizes_tbl[i],
9361 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9362 }
9363 break;
9364 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9365 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9366 default:
9367 cam_dimension_t largest_picture_size;
9368 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9369 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9370 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9371 addStreamConfig(available_stream_configs, scalar_formats[j],
9372 gCamCapability[cameraId]->picture_sizes_tbl[i],
9373 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009374 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
9375 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9376 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
9377 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9378 >= minInputSize.width) || (gCamCapability[cameraId]->
9379 picture_sizes_tbl[i].height >= minInputSize.height)) {
9380 addStreamConfig(available_stream_configs, scalar_formats[j],
9381 gCamCapability[cameraId]->picture_sizes_tbl[i],
9382 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9383 }
9384 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009386
Thierry Strudel3d639192016-09-09 11:52:26 -07009387 break;
9388 }
9389 }
9390
9391 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9392 available_stream_configs.array(), available_stream_configs.size());
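    // Each configuration appended above is a flattened 4-tuple of
    // (format, width, height, direction), where direction is the
    // ..._STREAM_CONFIGURATIONS_OUTPUT or _INPUT enum passed to addStreamConfig().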
9393 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9394 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9395
9396 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9397 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9398
9399 /* android.scaler.availableMinFrameDurations */
9400 Vector<int64_t> available_min_durations;
9401 for (size_t j = 0; j < scalar_formats_count; j++) {
9402 switch (scalar_formats[j]) {
9403 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9404 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9405 case HAL_PIXEL_FORMAT_RAW10:
9406 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9407 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9408 available_min_durations.add(scalar_formats[j]);
9409 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9410 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9411 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9412 }
9413 break;
9414 default:
9415 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9416 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9417 available_min_durations.add(scalar_formats[j]);
9418 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9419 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9420 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9421 }
9422 break;
9423 }
9424 }
9425 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9426 available_min_durations.array(), available_min_durations.size());
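    // ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS entries are flattened 4-tuples
    // of (format, width, height, min_frame_duration_ns); e.g. a hypothetical
    // (BLOB, 4032, 3024, 33333333) entry would advertise ~30fps for the largest JPEG.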
9427
9428 Vector<int32_t> available_hfr_configs;
9429 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9430 int32_t fps = 0;
9431 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9432 case CAM_HFR_MODE_60FPS:
9433 fps = 60;
9434 break;
9435 case CAM_HFR_MODE_90FPS:
9436 fps = 90;
9437 break;
9438 case CAM_HFR_MODE_120FPS:
9439 fps = 120;
9440 break;
9441 case CAM_HFR_MODE_150FPS:
9442 fps = 150;
9443 break;
9444 case CAM_HFR_MODE_180FPS:
9445 fps = 180;
9446 break;
9447 case CAM_HFR_MODE_210FPS:
9448 fps = 210;
9449 break;
9450 case CAM_HFR_MODE_240FPS:
9451 fps = 240;
9452 break;
9453 case CAM_HFR_MODE_480FPS:
9454 fps = 480;
9455 break;
9456 case CAM_HFR_MODE_OFF:
9457 case CAM_HFR_MODE_MAX:
9458 default:
9459 break;
9460 }
9461
9462 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9463 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9464 /* For each HFR frame rate, need to advertise one variable fps range
9465 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9466 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9467 * set by the app. When video recording is started, [120, 120] is
9468 * set. This way sensor configuration does not change when recording
9469 * is started */
9470
9471 /* (width, height, fps_min, fps_max, batch_size_max) */
9472 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9473 j < MAX_SIZES_CNT; j++) {
9474 available_hfr_configs.add(
9475 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9476 available_hfr_configs.add(
9477 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9478 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9479 available_hfr_configs.add(fps);
9480 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9481
9482 /* (width, height, fps_min, fps_max, batch_size_max) */
9483 available_hfr_configs.add(
9484 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9485 available_hfr_configs.add(
9486 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9487 available_hfr_configs.add(fps);
9488 available_hfr_configs.add(fps);
9489 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9490 }
9491 }
9492 }
9493 //Advertise HFR capability only if the property is set
9494 memset(prop, 0, sizeof(prop));
9495 property_get("persist.camera.hal3hfr.enable", prop, "1");
9496 uint8_t hfrEnable = (uint8_t)atoi(prop);
9497
9498 if(hfrEnable && available_hfr_configs.array()) {
9499 staticInfo.update(
9500 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9501 available_hfr_configs.array(), available_hfr_configs.size());
9502 }
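    // Worked example (dimension is illustrative): a 1920x1080 entry supporting
    // CAM_HFR_MODE_120FPS contributes two 5-tuples:
    //   (1920, 1080, PREVIEW_FPS_FOR_HFR, 120, 120 / PREVIEW_FPS_FOR_HFR)
    //   (1920, 1080, 120, 120, 120 / PREVIEW_FPS_FOR_HFR)
    // i.e. the variable preview range and the fixed recording range described above.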
9503
9504 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9505 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9506 &max_jpeg_size, 1);
9507
9508 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9509 size_t size = 0;
9510 count = CAM_EFFECT_MODE_MAX;
9511 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9512 for (size_t i = 0; i < count; i++) {
9513 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9514 gCamCapability[cameraId]->supported_effects[i]);
9515 if (NAME_NOT_FOUND != val) {
9516 avail_effects[size] = (uint8_t)val;
9517 size++;
9518 }
9519 }
9520 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9521 avail_effects,
9522 size);
9523
9524 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9525 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9526 size_t supported_scene_modes_cnt = 0;
9527 count = CAM_SCENE_MODE_MAX;
9528 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9529 for (size_t i = 0; i < count; i++) {
9530 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9531 CAM_SCENE_MODE_OFF) {
9532 int val = lookupFwkName(SCENE_MODES_MAP,
9533 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9534 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009535
Thierry Strudel3d639192016-09-09 11:52:26 -07009536 if (NAME_NOT_FOUND != val) {
9537 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9538 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9539 supported_scene_modes_cnt++;
9540 }
9541 }
9542 }
9543 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9544 avail_scene_modes,
9545 supported_scene_modes_cnt);
9546
9547 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9548 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9549 supported_scene_modes_cnt,
9550 CAM_SCENE_MODE_MAX,
9551 scene_mode_overrides,
9552 supported_indexes,
9553 cameraId);
9554
9555 if (supported_scene_modes_cnt == 0) {
9556 supported_scene_modes_cnt = 1;
9557 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9558 }
9559
9560 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9561 scene_mode_overrides, supported_scene_modes_cnt * 3);
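    // scene_mode_overrides holds 3 consecutive bytes per advertised scene mode,
    // (AE mode, AWB mode, AF mode), in the same order as
    // ANDROID_CONTROL_AVAILABLE_SCENE_MODES; see makeOverridesList() below.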
9562
9563 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9564 ANDROID_CONTROL_MODE_AUTO,
9565 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9566 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9567 available_control_modes,
9568 3);
9569
9570 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9571 size = 0;
9572 count = CAM_ANTIBANDING_MODE_MAX;
9573 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9574 for (size_t i = 0; i < count; i++) {
9575 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9576 gCamCapability[cameraId]->supported_antibandings[i]);
9577 if (NAME_NOT_FOUND != val) {
9578 avail_antibanding_modes[size] = (uint8_t)val;
9579 size++;
9580 }
9581
9582 }
9583 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9584 avail_antibanding_modes,
9585 size);
9586
9587 uint8_t avail_abberation_modes[] = {
9588 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9589 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9590 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9591 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9592 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9593 if (0 == count) {
9594 // If no aberration correction modes are available for a device, advertise only the OFF mode
9595 size = 1;
9596 } else {
9597 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported.
9598 // So, advertise all 3 modes if at least one mode is supported, as per the
9599 // new M requirement
9600 size = 3;
9601 }
9602 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9603 avail_abberation_modes,
9604 size);
9605
9606 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9607 size = 0;
9608 count = CAM_FOCUS_MODE_MAX;
9609 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9610 for (size_t i = 0; i < count; i++) {
9611 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9612 gCamCapability[cameraId]->supported_focus_modes[i]);
9613 if (NAME_NOT_FOUND != val) {
9614 avail_af_modes[size] = (uint8_t)val;
9615 size++;
9616 }
9617 }
9618 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9619 avail_af_modes,
9620 size);
9621
9622 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9623 size = 0;
9624 count = CAM_WB_MODE_MAX;
9625 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9626 for (size_t i = 0; i < count; i++) {
9627 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9628 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9629 gCamCapability[cameraId]->supported_white_balances[i]);
9630 if (NAME_NOT_FOUND != val) {
9631 avail_awb_modes[size] = (uint8_t)val;
9632 size++;
9633 }
9634 }
9635 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9636 avail_awb_modes,
9637 size);
9638
9639 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9640 count = CAM_FLASH_FIRING_LEVEL_MAX;
9641 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9642 count);
9643 for (size_t i = 0; i < count; i++) {
9644 available_flash_levels[i] =
9645 gCamCapability[cameraId]->supported_firing_levels[i];
9646 }
9647 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9648 available_flash_levels, count);
9649
9650 uint8_t flashAvailable;
9651 if (gCamCapability[cameraId]->flash_available)
9652 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9653 else
9654 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9655 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9656 &flashAvailable, 1);
9657
9658 Vector<uint8_t> avail_ae_modes;
9659 count = CAM_AE_MODE_MAX;
9660 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9661 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009662 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9663 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9664 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9665 }
9666 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009667 }
9668 if (flashAvailable) {
9669 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9670 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9671 }
9672 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9673 avail_ae_modes.array(),
9674 avail_ae_modes.size());
9675
9676 int32_t sensitivity_range[2];
9677 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9678 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9679 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9680 sensitivity_range,
9681 sizeof(sensitivity_range) / sizeof(int32_t));
9682
9683 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9684 &gCamCapability[cameraId]->max_analog_sensitivity,
9685 1);
9686
9687 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
9688 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
9689 &sensor_orientation,
9690 1);
9691
9692 int32_t max_output_streams[] = {
9693 MAX_STALLING_STREAMS,
9694 MAX_PROCESSED_STREAMS,
9695 MAX_RAW_STREAMS};
9696 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
9697 max_output_streams,
9698 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
9699
9700 uint8_t avail_leds = 0;
9701 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
9702 &avail_leds, 0);
9703
9704 uint8_t focus_dist_calibrated;
9705 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
9706 gCamCapability[cameraId]->focus_dist_calibrated);
9707 if (NAME_NOT_FOUND != val) {
9708 focus_dist_calibrated = (uint8_t)val;
9709 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
9710 &focus_dist_calibrated, 1);
9711 }
9712
9713 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
9714 size = 0;
9715 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
9716 MAX_TEST_PATTERN_CNT);
9717 for (size_t i = 0; i < count; i++) {
9718 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
9719 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
9720 if (NAME_NOT_FOUND != testpatternMode) {
9721 avail_testpattern_modes[size] = testpatternMode;
9722 size++;
9723 }
9724 }
9725 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
9726 avail_testpattern_modes,
9727 size);
9728
9729 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
9730 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
9731 &max_pipeline_depth,
9732 1);
9733
9734 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
9735 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
9736 &partial_result_count,
9737 1);
9738
9739 int32_t max_stall_duration = MAX_REPROCESS_STALL;
9740 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
9741
9742 Vector<uint8_t> available_capabilities;
9743 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
9744 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
9745 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
9746 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
9747 if (supportBurst) {
9748 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
9749 }
9750 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
9751 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
9752 if (hfrEnable && available_hfr_configs.array()) {
9753 available_capabilities.add(
9754 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
9755 }
9756
9757 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
9758 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
9759 }
9760 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
9761 available_capabilities.array(),
9762 available_capabilities.size());
9763
9764 //aeLockAvailable is set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE.
9765 //Assumption is that all bayer cameras support MANUAL_SENSOR.
9766 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9767 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
9768
9769 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
9770 &aeLockAvailable, 1);
9771
9772 //awbLockAvailable is set to true if capabilities include MANUAL_POST_PROCESSING or
9773 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
9774 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
9775 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
9776
9777 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
9778 &awbLockAvailable, 1);
9779
9780 int32_t max_input_streams = 1;
9781 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
9782 &max_input_streams,
9783 1);
9784
9785 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
9786 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
9787 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
9788 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
9789 HAL_PIXEL_FORMAT_YCbCr_420_888};
9790 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
9791 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
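    // Reading the map above: an IMPLEMENTATION_DEFINED input can be reprocessed
    // into 2 output formats (BLOB, YCbCr_420_888), and a YCbCr_420_888 input
    // likewise into (BLOB, YCbCr_420_888).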
9792
9793 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
9794 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
9795 &max_latency,
9796 1);
9797
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009798#ifndef USE_HAL_3_3
9799 int32_t isp_sensitivity_range[2];
9800 isp_sensitivity_range[0] =
9801 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
9802 isp_sensitivity_range[1] =
9803 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
9804 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
9805 isp_sensitivity_range,
9806 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
9807#endif
9808
Thierry Strudel3d639192016-09-09 11:52:26 -07009809 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
9810 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
9811 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
9812 available_hot_pixel_modes,
9813 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
9814
9815 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
9816 ANDROID_SHADING_MODE_FAST,
9817 ANDROID_SHADING_MODE_HIGH_QUALITY};
9818 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
9819 available_shading_modes,
9820 3);
9821
9822 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
9823 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
9824 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
9825 available_lens_shading_map_modes,
9826 2);
9827
9828 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
9829 ANDROID_EDGE_MODE_FAST,
9830 ANDROID_EDGE_MODE_HIGH_QUALITY,
9831 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
9832 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
9833 available_edge_modes,
9834 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
9835
9836 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
9837 ANDROID_NOISE_REDUCTION_MODE_FAST,
9838 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
9839 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
9840 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
9841 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
9842 available_noise_red_modes,
9843 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
9844
9845 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
9846 ANDROID_TONEMAP_MODE_FAST,
9847 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
9848 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
9849 available_tonemap_modes,
9850 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
9851
9852 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
9853 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
9854 available_hot_pixel_map_modes,
9855 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9856
9857 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9858 gCamCapability[cameraId]->reference_illuminant1);
9859 if (NAME_NOT_FOUND != val) {
9860 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9861 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
9862 }
9863
9864 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
9865 gCamCapability[cameraId]->reference_illuminant2);
9866 if (NAME_NOT_FOUND != val) {
9867 uint8_t fwkReferenceIlluminant = (uint8_t)val;
9868 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
9869 }
9870
9871 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
9872 (void *)gCamCapability[cameraId]->forward_matrix1,
9873 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9874
9875 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
9876 (void *)gCamCapability[cameraId]->forward_matrix2,
9877 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
9878
9879 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
9880 (void *)gCamCapability[cameraId]->color_transform1,
9881 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9882
9883 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
9884 (void *)gCamCapability[cameraId]->color_transform2,
9885 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
9886
9887 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
9888 (void *)gCamCapability[cameraId]->calibration_transform1,
9889 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9890
9891 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
9892 (void *)gCamCapability[cameraId]->calibration_transform2,
9893 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9894
9895 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
9896 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
9897 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
9898 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9899 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
9900 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
9901 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
9902 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
9903 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
9904 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
9905 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
9906 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
9907 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9908 ANDROID_JPEG_GPS_COORDINATES,
9909 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
9910 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
9911 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
9912 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9913 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
9914 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
9915 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
9916 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
9917 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
9918 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009919#ifndef USE_HAL_3_3
9920 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9921#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009922 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009923 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009924 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
9925 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009926 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009927 /* DevCamDebug metadata request_keys_basic */
9928 DEVCAMDEBUG_META_ENABLE,
9929 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -08009930 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07009931 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
9932 NEXUS_EXPERIMENTAL_2017_SENSOR_MODE_FULLFOV
Samuel Ha68ba5172016-12-15 18:41:12 -08009933 };
Thierry Strudel3d639192016-09-09 11:52:26 -07009934
9935 size_t request_keys_cnt =
9936 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
9937 Vector<int32_t> available_request_keys;
9938 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
9939 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
9940 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
9941 }
9942
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07009943 if (gExposeEnableZslKey) {
9944 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
9945 }
9946
Thierry Strudel3d639192016-09-09 11:52:26 -07009947 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
9948 available_request_keys.array(), available_request_keys.size());
9949
9950 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
9951 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
9952 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
9953 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
9954 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
9955 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
9956 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
9957 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
9958 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
9959 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
9960 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
9961 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
9962 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
9963 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
9964 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
9965 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
9966 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -08009967 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -07009968 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
9969 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
9970 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009971 ANDROID_STATISTICS_FACE_SCORES,
9972#ifndef USE_HAL_3_3
9973 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
9974#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07009975 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -07009976 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -08009977 // DevCamDebug metadata result_keys_basic
9978 DEVCAMDEBUG_META_ENABLE,
9979 // DevCamDebug metadata result_keys AF
9980 DEVCAMDEBUG_AF_LENS_POSITION,
9981 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
9982 DEVCAMDEBUG_AF_TOF_DISTANCE,
9983 DEVCAMDEBUG_AF_LUMA,
9984 DEVCAMDEBUG_AF_HAF_STATE,
9985 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
9986 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
9987 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
9988 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
9989 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
9990 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
9991 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
9992 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
9993 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
9994 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
9995 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
9996 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
9997 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
9998 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
9999 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10000 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10001 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10002 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10003 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10004 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10005 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10006 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10007 // DevCamDebug metadata result_keys AEC
10008 DEVCAMDEBUG_AEC_TARGET_LUMA,
10009 DEVCAMDEBUG_AEC_COMP_LUMA,
10010 DEVCAMDEBUG_AEC_AVG_LUMA,
10011 DEVCAMDEBUG_AEC_CUR_LUMA,
10012 DEVCAMDEBUG_AEC_LINECOUNT,
10013 DEVCAMDEBUG_AEC_REAL_GAIN,
10014 DEVCAMDEBUG_AEC_EXP_INDEX,
10015 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010016 // DevCamDebug metadata result_keys zzHDR
10017 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10018 DEVCAMDEBUG_AEC_L_LINECOUNT,
10019 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10020 DEVCAMDEBUG_AEC_S_LINECOUNT,
10021 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10022 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10023 // DevCamDebug metadata result_keys ADRC
10024 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10025 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10026 DEVCAMDEBUG_AEC_GTM_RATIO,
10027 DEVCAMDEBUG_AEC_LTM_RATIO,
10028 DEVCAMDEBUG_AEC_LA_RATIO,
10029 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Ha68ba5172016-12-15 18:41:12 -080010030 // DevCamDebug metadata result_keys AWB
10031 DEVCAMDEBUG_AWB_R_GAIN,
10032 DEVCAMDEBUG_AWB_G_GAIN,
10033 DEVCAMDEBUG_AWB_B_GAIN,
10034 DEVCAMDEBUG_AWB_CCT,
10035 DEVCAMDEBUG_AWB_DECISION,
10036 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010037 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10038 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10039 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010040 };
10041
Thierry Strudel3d639192016-09-09 11:52:26 -070010042 size_t result_keys_cnt =
10043 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10044
10045 Vector<int32_t> available_result_keys;
10046 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10047 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10048 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10049 }
10050 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10051 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10052 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10053 }
10054 if (supportedFaceDetectMode == 1) {
10055 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10056 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10057 } else if ((supportedFaceDetectMode == 2) ||
10058 (supportedFaceDetectMode == 3)) {
10059 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10060 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10061 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010062#ifndef USE_HAL_3_3
10063 if (hasBlackRegions) {
10064 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10065 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10066 }
10067#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010068
10069 if (gExposeEnableZslKey) {
10070 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
10071 }
10072
Thierry Strudel3d639192016-09-09 11:52:26 -070010073 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10074 available_result_keys.array(), available_result_keys.size());
10075
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010076 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010077 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10078 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10079 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10080 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10081 ANDROID_SCALER_CROPPING_TYPE,
10082 ANDROID_SYNC_MAX_LATENCY,
10083 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10084 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10085 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10086 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10087 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10088 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10089 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10090 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10091 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10092 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10093 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10094 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10095 ANDROID_LENS_FACING,
10096 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10097 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10098 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10099 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10100 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10101 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10102 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10103 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10104 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10105 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10106 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10107 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10108 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10109 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10110 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10111 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10112 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10113 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10114 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10115 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010116 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010117 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10118 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10119 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10120 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10121 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10122 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10123 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10124 ANDROID_CONTROL_AVAILABLE_MODES,
10125 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10126 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10127 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10128 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010129 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10130#ifndef USE_HAL_3_3
10131 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10132 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10133#endif
10134 };
10135
10136 Vector<int32_t> available_characteristics_keys;
10137 available_characteristics_keys.appendArray(characteristics_keys_basic,
10138 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10139#ifndef USE_HAL_3_3
10140 if (hasBlackRegions) {
10141 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10142 }
10143#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010144
10145 if (0 <= indexPD) {
10146 int32_t depthKeys[] = {
10147 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10148 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10149 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10150 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10151 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10152 };
10153 available_characteristics_keys.appendArray(depthKeys,
10154 sizeof(depthKeys) / sizeof(depthKeys[0]));
10155 }
10156
Thierry Strudel3d639192016-09-09 11:52:26 -070010157 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010158 available_characteristics_keys.array(),
10159 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010160
10161 /*available stall durations depend on the hw + sw and will be different for different devices */
10162 /*have to add for raw after implementation*/
10163 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10164 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10165
10166 Vector<int64_t> available_stall_durations;
10167 for (uint32_t j = 0; j < stall_formats_count; j++) {
10168 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10169 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10170 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10171 available_stall_durations.add(stall_formats[j]);
10172 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10173 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10174 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10175 }
10176 } else {
10177 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10178 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10179 available_stall_durations.add(stall_formats[j]);
10180 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10181 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10182 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10183 }
10184 }
10185 }
10186 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10187 available_stall_durations.array(),
10188 available_stall_durations.size());
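    // ANDROID_SCALER_AVAILABLE_STALL_DURATIONS uses the same flattened 4-tuple
    // layout as the min frame duration tag: (format, width, height, stall_ns),
    // one entry per JPEG picture size and per RAW16 dimension.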
10189
10190 //QCAMERA3_OPAQUE_RAW
10191 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10192 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10193 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10194 case LEGACY_RAW:
10195 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10196 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10197 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10198 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10199 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10200 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10201 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10202 break;
10203 case MIPI_RAW:
10204 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10205 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10206 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10207 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10208 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10209 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10210 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10211 break;
10212 default:
10213 LOGE("unknown opaque_raw_format %d",
10214 gCamCapability[cameraId]->opaque_raw_fmt);
10215 break;
10216 }
10217 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10218
10219 Vector<int32_t> strides;
10220 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10221 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10222 cam_stream_buf_plane_info_t buf_planes;
10223 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10224 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10225 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10226 &gCamCapability[cameraId]->padding_info, &buf_planes);
10227 strides.add(buf_planes.plane_info.mp[0].stride);
10228 }
10229 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10230 strides.size());
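    // QCAMERA3_OPAQUE_RAW_STRIDES entries are (width, height, stride) triplets,
    // with the stride taken from the plane info that mm_stream_calc_offset_raw()
    // computes for the opaque RAW format selected above.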
10231
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010232 //TBD: remove the following line once backend advertises zzHDR in feature mask
10233 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010234 //Video HDR default
10235 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10236 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010237 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010238 int32_t vhdr_mode[] = {
10239 QCAMERA3_VIDEO_HDR_MODE_OFF,
10240 QCAMERA3_VIDEO_HDR_MODE_ON};
10241
10242 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10243 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10244 vhdr_mode, vhdr_mode_count);
10245 }
10246
Thierry Strudel3d639192016-09-09 11:52:26 -070010247 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10248 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10249 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10250
10251 uint8_t isMonoOnly =
10252 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10253 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10254 &isMonoOnly, 1);
10255
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010256#ifndef USE_HAL_3_3
10257 Vector<int32_t> opaque_size;
10258 for (size_t j = 0; j < scalar_formats_count; j++) {
10259 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10260 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10261 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10262 cam_stream_buf_plane_info_t buf_planes;
10263
10264 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10265 &gCamCapability[cameraId]->padding_info, &buf_planes);
10266
10267 if (rc == 0) {
10268 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10269 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10270 opaque_size.add(buf_planes.plane_info.frame_len);
10271 } else {
10272 LOGE("raw frame calculation failed!");
10273 }
10274 }
10275 }
10276 }
10277
10278 if ((opaque_size.size() > 0) &&
10279 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10280 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10281 else
10282 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10283#endif
10284
Thierry Strudel04e026f2016-10-10 11:27:36 -070010285 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10286 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10287 size = 0;
10288 count = CAM_IR_MODE_MAX;
10289 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10290 for (size_t i = 0; i < count; i++) {
10291 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10292 gCamCapability[cameraId]->supported_ir_modes[i]);
10293 if (NAME_NOT_FOUND != val) {
10294 avail_ir_modes[size] = (int32_t)val;
10295 size++;
10296 }
10297 }
10298 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10299 avail_ir_modes, size);
10300 }
10301
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010302 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10303 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10304 size = 0;
10305 count = CAM_AEC_CONVERGENCE_MAX;
10306 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10307 for (size_t i = 0; i < count; i++) {
10308 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10309 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10310 if (NAME_NOT_FOUND != val) {
10311 available_instant_aec_modes[size] = (int32_t)val;
10312 size++;
10313 }
10314 }
10315 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10316 available_instant_aec_modes, size);
10317 }
10318
Thierry Strudel54dc9782017-02-15 12:12:10 -080010319 int32_t sharpness_range[] = {
10320 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10321 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10322 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10323
10324 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10325 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10326 size = 0;
10327 count = CAM_BINNING_CORRECTION_MODE_MAX;
10328 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10329 for (size_t i = 0; i < count; i++) {
10330 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10331 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10332 gCamCapability[cameraId]->supported_binning_modes[i]);
10333 if (NAME_NOT_FOUND != val) {
10334 avail_binning_modes[size] = (int32_t)val;
10335 size++;
10336 }
10337 }
10338 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10339 avail_binning_modes, size);
10340 }
10341
10342 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10343 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10344 size = 0;
10345 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10346 for (size_t i = 0; i < count; i++) {
10347 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10348 gCamCapability[cameraId]->supported_aec_modes[i]);
10349 if (NAME_NOT_FOUND != val)
10350 available_aec_modes[size++] = val;
10351 }
10352 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10353 available_aec_modes, size);
10354 }
10355
10356 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10357 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10358 size = 0;
10359 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10360 for (size_t i = 0; i < count; i++) {
10361 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10362 gCamCapability[cameraId]->supported_iso_modes[i]);
10363 if (NAME_NOT_FOUND != val)
10364 available_iso_modes[size++] = val;
10365 }
10366 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10367 available_iso_modes, size);
10368 }
10369
10370 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
10371 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++) // bound by the tag size; 'count' above belongs to the ISO block
10372 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10373 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10374 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10375
10376 int32_t available_saturation_range[4];
10377 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10378 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10379 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10380 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10381 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10382 available_saturation_range, 4);
10383
10384 uint8_t is_hdr_values[2];
10385 is_hdr_values[0] = 0;
10386 is_hdr_values[1] = 1;
10387 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10388 is_hdr_values, 2);
10389
10390 float is_hdr_confidence_range[2];
10391 is_hdr_confidence_range[0] = 0.0;
10392 is_hdr_confidence_range[1] = 1.0;
10393 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10394 is_hdr_confidence_range, 2);
10395
Emilian Peev0a972ef2017-03-16 10:25:53 +000010396 size_t eepromLength = strnlen(
10397 reinterpret_cast<const char *>(
10398 gCamCapability[cameraId]->eeprom_version_info),
10399 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10400 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010401 char easelInfo[] = ",E:N";
10402 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10403 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10404 eepromLength += sizeof(easelInfo);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010405 strlcat(eepromInfo, (gEaselManagerClient.isEaselPresentOnDevice() ? ",E:Y" : ",E:N"),
10406 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010407 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010408 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10409 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10410 }
10411
Thierry Strudel3d639192016-09-09 11:52:26 -070010412 gStaticMetadata[cameraId] = staticInfo.release();
10413 return rc;
10414}
10415
10416/*===========================================================================
10417 * FUNCTION : makeTable
10418 *
10419 * DESCRIPTION: make a table of sizes
10420 *
10421 * PARAMETERS :
10422 *   @dimTable : source table of dimensions; @size : valid entries; @max_size : copy limit
10423 *   @sizeTable : output array filled with flattened (width, height) pairs
10424 *==========================================================================*/
10425void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10426 size_t max_size, int32_t *sizeTable)
10427{
10428 size_t j = 0;
10429 if (size > max_size) {
10430 size = max_size;
10431 }
10432 for (size_t i = 0; i < size; i++) {
10433 sizeTable[j] = dimTable[i].width;
10434 sizeTable[j+1] = dimTable[i].height;
10435 j+=2;
10436 }
10437}
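// Usage sketch (hypothetical dimensions): a dimTable of {{4032,3024},{1920,1080}}
// with size 2 is flattened into sizeTable = {4032, 3024, 1920, 1080}.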
10438
10439/*===========================================================================
10440 * FUNCTION : makeFPSTable
10441 *
10442 * DESCRIPTION: make a table of fps ranges
10443 *
10444 * PARAMETERS :
10445 *
10446 *==========================================================================*/
10447void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10448 size_t max_size, int32_t *fpsRangesTable)
10449{
10450 size_t j = 0;
10451 if (size > max_size) {
10452 size = max_size;
10453 }
10454 for (size_t i = 0; i < size; i++) {
10455 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10456 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10457 j+=2;
10458 }
10459}
10460
10461/*===========================================================================
10462 * FUNCTION : makeOverridesList
10463 *
10464 * DESCRIPTION: make a list of scene mode overrides
10465 *
10466 * PARAMETERS :
10467 *
10468 *
10469 *==========================================================================*/
10470void QCamera3HardwareInterface::makeOverridesList(
10471 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10472 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10473{
10474 /*daemon will give a list of overrides for all scene modes.
10475 However we should send the fwk only the overrides for the scene modes
10476 supported by the framework*/
10477 size_t j = 0;
10478 if (size > max_size) {
10479 size = max_size;
10480 }
10481 size_t focus_count = CAM_FOCUS_MODE_MAX;
10482 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10483 focus_count);
10484 for (size_t i = 0; i < size; i++) {
10485 bool supt = false;
10486 size_t index = supported_indexes[i];
10487 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10488 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10489 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10490 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10491 overridesTable[index].awb_mode);
10492 if (NAME_NOT_FOUND != val) {
10493 overridesList[j+1] = (uint8_t)val;
10494 }
10495 uint8_t focus_override = overridesTable[index].af_mode;
10496 for (size_t k = 0; k < focus_count; k++) {
10497 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10498 supt = true;
10499 break;
10500 }
10501 }
10502 if (supt) {
10503 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10504 focus_override);
10505 if (NAME_NOT_FOUND != val) {
10506 overridesList[j+2] = (uint8_t)val;
10507 }
10508 } else {
10509 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10510 }
10511 j+=3;
10512 }
10513}
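
// Illustrative layout note (not additional behavior): makeOverridesList() above
// emits one {AE, AWB, AF} triplet per supported scene mode, which is the layout
// of ANDROID_CONTROL_SCENE_MODE_OVERRIDES. A hypothetical result for two scene
// modes on a flash-capable camera could look like:
//
//   overridesList = {
//       ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        // scene 0: AE override
//       ANDROID_CONTROL_AWB_MODE_AUTO,                // scene 0: AWB override
//       ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE,   // scene 0: AF override
//       ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        // scene 1: AE override
//       ANDROID_CONTROL_AWB_MODE_INCANDESCENT,        // scene 1: AWB override
//       ANDROID_CONTROL_AF_MODE_OFF,                  // scene 1: AF unsupported
//   };
//
// If the daemon's AF override is not among the supported focus modes, the AF
// slot falls back to ANDROID_CONTROL_AF_MODE_OFF, as the loop above shows.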
10514
10515/*===========================================================================
10516 * FUNCTION : filterJpegSizes
10517 *
10518 * DESCRIPTION: Returns the supported JPEG sizes, based on the maximum
10519 * dimension that they can be downscaled to
10520 *
10521 * PARAMETERS :
10522 *
10523 * RETURN : length of jpegSizes array
10524 *==========================================================================*/
10525
10526size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10527 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10528 uint8_t downscale_factor)
10529{
10530 if (0 == downscale_factor) {
10531 downscale_factor = 1;
10532 }
10533
10534 int32_t min_width = active_array_size.width / downscale_factor;
10535 int32_t min_height = active_array_size.height / downscale_factor;
10536 size_t jpegSizesCnt = 0;
10537 if (processedSizesCnt > maxCount) {
10538 processedSizesCnt = maxCount;
10539 }
10540 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10541 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10542 jpegSizes[jpegSizesCnt] = processedSizes[i];
10543 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10544 jpegSizesCnt += 2;
10545 }
10546 }
10547 return jpegSizesCnt;
10548}
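
// Illustrative sketch (not part of the HAL build): with a made-up 4000x3000
// active array and downscale_factor = 2, only processed sizes of at least
// 2000x1500 survive filterJpegSizes(). A local copy of the filter loop keeps
// the example self-contained.
static void exampleFilterJpegSizes()
{
    // Flattened {width, height} pairs, as produced by makeTable().
    int32_t processed[8] = {4000, 3000, 3840, 2160, 1920, 1080, 1280, 720};
    int32_t jpeg[8] = {0};
    const int32_t min_w = 4000 / 2;   // 2000
    const int32_t min_h = 3000 / 2;   // 1500
    size_t cnt = 0;
    for (size_t i = 0; i < 8; i += 2) {
        if (processed[i] >= min_w && processed[i + 1] >= min_h) {
            jpeg[cnt]     = processed[i];
            jpeg[cnt + 1] = processed[i + 1];
            cnt += 2;
        }
    }
    // jpeg now holds {4000, 3000, 3840, 2160} and cnt == 4; 1920x1080 and
    // 1280x720 are dropped because their widths fall below the 2000 minimum.
    (void)jpeg;
    (void)cnt;
}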
10549
10550/*===========================================================================
10551 * FUNCTION : computeNoiseModelEntryS
10552 *
10553 * DESCRIPTION: function to map a given sensitivity to the S noise
10554 * model parameters in the DNG noise model.
10555 *
10556 * PARAMETERS : sens : the sensor sensitivity
10557 *
10558 * RETURN : S (sensor amplification) noise
10559 *
10560 *==========================================================================*/
10561double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10562 double s = gCamCapability[mCameraId]->gradient_S * sens +
10563 gCamCapability[mCameraId]->offset_S;
10564 return ((s < 0.0) ? 0.0 : s);
10565}
10566
10567/*===========================================================================
10568 * FUNCTION : computeNoiseModelEntryO
10569 *
10570 * DESCRIPTION: function to map a given sensitivity to the O noise
10571 * model parameters in the DNG noise model.
10572 *
10573 * PARAMETERS : sens : the sensor sensitivity
10574 *
10575 * RETURN : O (sensor readout) noise
10576 *
10577 *==========================================================================*/
10578double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10579 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10580 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10581 1.0 : (1.0 * sens / max_analog_sens);
10582 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10583 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10584 return ((o < 0.0) ? 0.0 : o);
10585}
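
// Worked example (illustrative values only, not taken from any real sensor
// characterization): the DNG noise model approximates per-pixel variance as
// N(x) = S * x + O, where computeNoiseModelEntryS()/computeNoiseModelEntryO()
// above supply S and O for a given sensitivity. Assuming gradient_S = 3.0e-7,
// offset_S = 4.0e-6, gradient_O = 4.5e-12, offset_O = 3.0e-8 and
// max_analog_sensitivity = 800, then for sens = 1600:
//   S = 3.0e-7 * 1600 + 4.0e-6                 = 4.84e-4
//   digital_gain = 1600 / 800                  = 2.0
//   O = 4.5e-12 * 1600^2 + 3.0e-8 * 2.0^2      = 1.152e-5 + 1.2e-7 = 1.164e-5
// Both values are clamped to be non-negative before they are used for the
// ANDROID_SENSOR_NOISE_PROFILE entries.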
10586
10587/*===========================================================================
10588 * FUNCTION : getSensorSensitivity
10589 *
10590 * DESCRIPTION: convert iso_mode to an integer value
10591 *
10592 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10593 *
10594 * RETURN : sensitivity supported by sensor
10595 *
10596 *==========================================================================*/
10597int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10598{
10599 int32_t sensitivity;
10600
10601 switch (iso_mode) {
10602 case CAM_ISO_MODE_100:
10603 sensitivity = 100;
10604 break;
10605 case CAM_ISO_MODE_200:
10606 sensitivity = 200;
10607 break;
10608 case CAM_ISO_MODE_400:
10609 sensitivity = 400;
10610 break;
10611 case CAM_ISO_MODE_800:
10612 sensitivity = 800;
10613 break;
10614 case CAM_ISO_MODE_1600:
10615 sensitivity = 1600;
10616 break;
10617 default:
10618 sensitivity = -1;
10619 break;
10620 }
10621 return sensitivity;
10622}
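
// Usage note (illustrative): getSensorSensitivity() is a plain enum-to-ISO
// mapping, e.g. CAM_ISO_MODE_400 maps to 400, and any unlisted mode returns
// -1 so callers can skip unsupported entries when deriving the sensitivity
// range advertised to the framework.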
10623
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010624int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010625 if (!EaselManagerClientOpened && gEaselManagerClient.isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010626 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10627 // to connect to Easel.
10628 bool doNotpowerOnEasel =
10629 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10630
10631 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010632 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10633 return OK;
10634 }
10635
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010636 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010637 status_t res = gEaselManagerClient.open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010638 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010639 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010640 return res;
10641 }
10642
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010643 EaselManagerClientOpened = true;
10644
10645 res = gEaselManagerClient.suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010646 if (res != OK) {
10647 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10648 }
10649
10650 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010651 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010652
10653 // Expose enableZsl key only when HDR+ mode is enabled.
10654 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010655 }
10656
10657 return OK;
10658}
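
// Usage note (illustrative; property names taken from the code above): the
// Easel/HDR+ behavior of initHdrPlusClientLocked() can be steered from adb
// for local testing, for example:
//
//   adb shell setprop camera.hdrplus.donotpoweroneasel 1   # leave Easel off
//   adb shell setprop persist.camera.hdrplus.enable 1      # full HDR+ mode
//   adb shell setprop persist.camera.hdrplus.profiling 1   # enable profiling
//
// With persist.camera.hdrplus.enable unset or 0, the HAL keeps Easel in bypass
// mode (gEaselBypassOnly == true) and does not expose the
// ANDROID_CONTROL_ENABLE_ZSL key to applications.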
10659
Thierry Strudel3d639192016-09-09 11:52:26 -070010660/*===========================================================================
10661 * FUNCTION : getCamInfo
10662 *
10663 * DESCRIPTION: query camera capabilities
10664 *
10665 * PARAMETERS :
10666 * @cameraId : camera Id
10667 * @info : camera info struct to be filled in with camera capabilities
10668 *
10669 * RETURN : int type of status
10670 * NO_ERROR -- success
10671 * non-zero failure code
10672 *==========================================================================*/
10673int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
10674 struct camera_info *info)
10675{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010676 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070010677 int rc = 0;
10678
10679 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010680
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010681 {
10682 Mutex::Autolock l(gHdrPlusClientLock);
10683 rc = initHdrPlusClientLocked();
10684 if (rc != OK) {
10685 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
10686 pthread_mutex_unlock(&gCamLock);
10687 return rc;
10688 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070010689 }
10690
Thierry Strudel3d639192016-09-09 11:52:26 -070010691 if (NULL == gCamCapability[cameraId]) {
10692 rc = initCapabilities(cameraId);
10693 if (rc < 0) {
10694 pthread_mutex_unlock(&gCamLock);
10695 return rc;
10696 }
10697 }
10698
10699 if (NULL == gStaticMetadata[cameraId]) {
10700 rc = initStaticMetadata(cameraId);
10701 if (rc < 0) {
10702 pthread_mutex_unlock(&gCamLock);
10703 return rc;
10704 }
10705 }
10706
10707 switch(gCamCapability[cameraId]->position) {
10708 case CAM_POSITION_BACK:
10709 case CAM_POSITION_BACK_AUX:
10710 info->facing = CAMERA_FACING_BACK;
10711 break;
10712
10713 case CAM_POSITION_FRONT:
10714 case CAM_POSITION_FRONT_AUX:
10715 info->facing = CAMERA_FACING_FRONT;
10716 break;
10717
10718 default:
10719 LOGE("Unknown position type %d for camera id:%d",
10720 gCamCapability[cameraId]->position, cameraId);
10721 rc = -1;
10722 break;
10723 }
10724
10725
10726 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010727#ifndef USE_HAL_3_3
10728 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
10729#else
Thierry Strudel3d639192016-09-09 11:52:26 -070010730 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010731#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010732 info->static_camera_characteristics = gStaticMetadata[cameraId];
10733
10734 //For now assume both cameras can operate independently.
10735 info->conflicting_devices = NULL;
10736 info->conflicting_devices_length = 0;
10737
10738 // Resource cost is 100 * MIN(1.0, m/M),
10739 // where m is the throughput requirement with the maximum stream configuration
10740 // and M is the maximum throughput of the CPP.
10741 float max_fps = 0.0;
10742 for (uint32_t i = 0;
10743 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
10744 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
10745 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
10746 }
10747 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
10748 gCamCapability[cameraId]->active_array_size.width *
10749 gCamCapability[cameraId]->active_array_size.height * max_fps /
10750 gCamCapability[cameraId]->max_pixel_bandwidth;
10751 info->resource_cost = 100 * MIN(1.0, ratio);
10752 LOGI("camera %d resource cost is %d", cameraId,
10753 info->resource_cost);
10754
10755 pthread_mutex_unlock(&gCamLock);
10756 return rc;
10757}
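
// Worked example (made-up numbers): with MAX_PROCESSED_STREAMS = 3, a
// 4000x3000 active array, a peak fps of 30 and max_pixel_bandwidth of
// 1.2e9 pixels/s, the resource cost computed above is
//   ratio         = 3 * 4000 * 3000 * 30 / 1.2e9 = 0.9
//   resource_cost = 100 * MIN(1.0, 0.9)          = 90
// The MIN() clamp keeps the reported cost within the 0-100 range that the
// camera service expects.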
10758
10759/*===========================================================================
10760 * FUNCTION : translateCapabilityToMetadata
10761 *
10762 * DESCRIPTION: translate the capability into camera_metadata_t
10763 *
10764 * PARAMETERS : type of the request
10765 *
10766 *
10767 * RETURN : success: camera_metadata_t*
10768 * failure: NULL
10769 *
10770 *==========================================================================*/
10771camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
10772{
10773 if (mDefaultMetadata[type] != NULL) {
10774 return mDefaultMetadata[type];
10775 }
10776 //first time we are handling this request
10777 //fill up the metadata structure using the wrapper class
10778 CameraMetadata settings;
10779 //translate from cam_capability_t to camera_metadata_tag_t
10780 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
10781 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
10782 int32_t defaultRequestID = 0;
10783 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
10784
10785 /* OIS disable */
10786 char ois_prop[PROPERTY_VALUE_MAX];
10787 memset(ois_prop, 0, sizeof(ois_prop));
10788 property_get("persist.camera.ois.disable", ois_prop, "0");
10789 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
10790
10791 /* Force video to use OIS */
10792 char videoOisProp[PROPERTY_VALUE_MAX];
10793 memset(videoOisProp, 0, sizeof(videoOisProp));
10794 property_get("persist.camera.ois.video", videoOisProp, "1");
10795 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080010796
10797 // Hybrid AE enable/disable
10798 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
10799 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
10800 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
10801 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
10802
Thierry Strudel3d639192016-09-09 11:52:26 -070010803 uint8_t controlIntent = 0;
10804 uint8_t focusMode;
10805 uint8_t vsMode;
10806 uint8_t optStabMode;
10807 uint8_t cacMode;
10808 uint8_t edge_mode;
10809 uint8_t noise_red_mode;
10810 uint8_t tonemap_mode;
10811 bool highQualityModeEntryAvailable = FALSE;
10812 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080010813 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070010814 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
10815 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010816 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010817 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010818 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080010819
Thierry Strudel3d639192016-09-09 11:52:26 -070010820 switch (type) {
10821 case CAMERA3_TEMPLATE_PREVIEW:
10822 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
10823 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10824 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10825 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10826 edge_mode = ANDROID_EDGE_MODE_FAST;
10827 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10828 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10829 break;
10830 case CAMERA3_TEMPLATE_STILL_CAPTURE:
10831 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
10832 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10833 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10834 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
10835 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
10836 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
10837 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10838 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
10839 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10840 if (gCamCapability[mCameraId]->aberration_modes[i] ==
10841 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10842 highQualityModeEntryAvailable = TRUE;
10843 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
10844 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10845 fastModeEntryAvailable = TRUE;
10846 }
10847 }
10848 if (highQualityModeEntryAvailable) {
10849 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
10850 } else if (fastModeEntryAvailable) {
10851 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10852 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010853 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10854 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10855 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010856 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070010857 break;
10858 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10859 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
10860 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10861 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010862 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10863 edge_mode = ANDROID_EDGE_MODE_FAST;
10864 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10865 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10866 if (forceVideoOis)
10867 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10868 break;
10869 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
10870 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
10871 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
10872 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070010873 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10874 edge_mode = ANDROID_EDGE_MODE_FAST;
10875 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10876 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10877 if (forceVideoOis)
10878 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10879 break;
10880 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
10881 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
10882 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10883 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10884 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10885 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
10886 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
10887 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10888 break;
10889 case CAMERA3_TEMPLATE_MANUAL:
10890 edge_mode = ANDROID_EDGE_MODE_FAST;
10891 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10892 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10893 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10894 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
10895 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10896 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10897 break;
10898 default:
10899 edge_mode = ANDROID_EDGE_MODE_FAST;
10900 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
10901 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
10902 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
10903 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
10904 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
10905 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10906 break;
10907 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070010908 // Set CAC to OFF if underlying device doesn't support
10909 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10910 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
10911 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010912 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
10913 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
10914 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
10915 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
10916 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
10917 }
10918 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080010919 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070010920 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010921
10922 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10923 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
10924 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
10925 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
10926 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
10927 || ois_disable)
10928 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
10929 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070010930 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010931
10932 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10933 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
10934
10935 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
10936 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
10937
10938 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
10939 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
10940
10941 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
10942 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
10943
10944 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
10945 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
10946
10947 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
10948 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
10949
10950 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
10951 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
10952
10953 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
10954 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
10955
10956 /*flash*/
10957 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
10958 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
10959
10960 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
10961 settings.update(ANDROID_FLASH_FIRING_POWER,
10962 &flashFiringLevel, 1);
10963
10964 /* lens */
10965 float default_aperture = gCamCapability[mCameraId]->apertures[0];
10966 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
10967
10968 if (gCamCapability[mCameraId]->filter_densities_count) {
10969 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
10970 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
10971 gCamCapability[mCameraId]->filter_densities_count);
10972 }
10973
10974 float default_focal_length = gCamCapability[mCameraId]->focal_length;
10975 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
10976
Thierry Strudel3d639192016-09-09 11:52:26 -070010977 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
10978 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
10979
10980 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
10981 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
10982
10983 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
10984 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
10985
10986 /* face detection (default to OFF) */
10987 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
10988 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
10989
Thierry Strudel54dc9782017-02-15 12:12:10 -080010990 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
10991 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070010992
10993 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
10994 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
10995
10996 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
10997 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
10998
Thierry Strudel3d639192016-09-09 11:52:26 -070010999
11000 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11001 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11002
11003 /* Exposure time(Update the Min Exposure Time)*/
11004 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11005 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11006
11007 /* frame duration */
11008 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11009 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11010
11011 /* sensitivity */
11012 static const int32_t default_sensitivity = 100;
11013 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011014#ifndef USE_HAL_3_3
11015 static const int32_t default_isp_sensitivity =
11016 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11017 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11018#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011019
11020 /*edge mode*/
11021 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11022
11023 /*noise reduction mode*/
11024 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11025
11026 /*color correction mode*/
11027 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11028 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11029
11030 /*transform matrix mode*/
11031 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11032
11033 int32_t scaler_crop_region[4];
11034 scaler_crop_region[0] = 0;
11035 scaler_crop_region[1] = 0;
11036 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11037 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11038 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11039
11040 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11041 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11042
11043 /*focus distance*/
11044 float focus_distance = 0.0;
11045 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11046
11047 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011048 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011049 float max_range = 0.0;
11050 float max_fixed_fps = 0.0;
11051 int32_t fps_range[2] = {0, 0};
11052 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11053 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011054 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11055 TEMPLATE_MAX_PREVIEW_FPS) {
11056 continue;
11057 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011058 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11059 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11060 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11061 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11062 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11063 if (range > max_range) {
11064 fps_range[0] =
11065 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11066 fps_range[1] =
11067 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11068 max_range = range;
11069 }
11070 } else {
11071 if (range < 0.01 && max_fixed_fps <
11072 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11073 fps_range[0] =
11074 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11075 fps_range[1] =
11076 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11077 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11078 }
11079 }
11080 }
11081 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11082
11083 /*precapture trigger*/
11084 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11085 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11086
11087 /*af trigger*/
11088 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11089 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11090
11091 /* ae & af regions */
11092 int32_t active_region[] = {
11093 gCamCapability[mCameraId]->active_array_size.left,
11094 gCamCapability[mCameraId]->active_array_size.top,
11095 gCamCapability[mCameraId]->active_array_size.left +
11096 gCamCapability[mCameraId]->active_array_size.width,
11097 gCamCapability[mCameraId]->active_array_size.top +
11098 gCamCapability[mCameraId]->active_array_size.height,
11099 0};
11100 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11101 sizeof(active_region) / sizeof(active_region[0]));
11102 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11103 sizeof(active_region) / sizeof(active_region[0]));
11104
11105 /* black level lock */
11106 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11107 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11108
Thierry Strudel3d639192016-09-09 11:52:26 -070011109 //special defaults for manual template
11110 if (type == CAMERA3_TEMPLATE_MANUAL) {
11111 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11112 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11113
11114 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11115 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11116
11117 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11118 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11119
11120 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11121 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11122
11123 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11124 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11125
11126 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11127 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11128 }
11129
11130
11131 /* TNR
11132 * This is where we decide for which templates TNR is enabled.
11133 * TNR is turned on if either the preview or the video stream requires it.
11134 * This is not to be confused with per-stream linking; that decision is
11135 * still made per session and is handled as part of stream configuration.
11136 */
11137 uint8_t tnr_enable = 0;
11138
11139 if (m_bTnrPreview || m_bTnrVideo) {
11140
11141 switch (type) {
11142 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11143 tnr_enable = 1;
11144 break;
11145
11146 default:
11147 tnr_enable = 0;
11148 break;
11149 }
11150
11151 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11152 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11153 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11154
11155 LOGD("TNR:%d with process plate %d for template:%d",
11156 tnr_enable, tnr_process_type, type);
11157 }
11158
11159 //Update Link tags to default
11160 int32_t sync_type = CAM_TYPE_STANDALONE;
11161 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11162
11163 int32_t is_main = 0; // value doesn't matter; the app is expected to overwrite it
11164 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11165
11166 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
11167
11168 /* CDS default */
11169 char prop[PROPERTY_VALUE_MAX];
11170 memset(prop, 0, sizeof(prop));
11171 property_get("persist.camera.CDS", prop, "Auto");
11172 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11173 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11174 if (CAM_CDS_MODE_MAX == cds_mode) {
11175 cds_mode = CAM_CDS_MODE_AUTO;
11176 }
11177
11178 /* Disabling CDS in templates which have TNR enabled*/
11179 if (tnr_enable)
11180 cds_mode = CAM_CDS_MODE_OFF;
11181
11182 int32_t mode = cds_mode;
11183 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011184
Thierry Strudel269c81a2016-10-12 12:13:59 -070011185 /* Manual Convergence AEC Speed is disabled by default*/
11186 float default_aec_speed = 0;
11187 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11188
11189 /* Manual Convergence AWB Speed is disabled by default*/
11190 float default_awb_speed = 0;
11191 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11192
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011193 // Set instant AEC to normal convergence by default
11194 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11195 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11196
Shuzhen Wang19463d72016-03-08 11:09:52 -080011197 /* hybrid ae */
11198 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
11199
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011200 if (gExposeEnableZslKey) {
11201 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
11202 }
11203
Thierry Strudel3d639192016-09-09 11:52:26 -070011204 mDefaultMetadata[type] = settings.release();
11205
11206 return mDefaultMetadata[type];
11207}
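
// Usage sketch (illustrative): the camera service reaches this function
// through the HAL3 default-settings entry point, roughly:
//
//   const camera_metadata_t *defaults =
//           device->ops->construct_default_request_settings(device,
//                   CAMERA3_TEMPLATE_STILL_CAPTURE);
//
// The result points at mDefaultMetadata[CAMERA3_TEMPLATE_STILL_CAPTURE], which
// this function builds on first use and then returns from the cache on every
// subsequent call for the same template.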
11208
11209/*===========================================================================
11210 * FUNCTION : setFrameParameters
11211 *
11212 * DESCRIPTION: set parameters per frame as requested in the metadata from
11213 * framework
11214 *
11215 * PARAMETERS :
11216 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011217 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011218 * @blob_request: Whether this request is a blob request or not
11219 *
11220 * RETURN : success: NO_ERROR
11221 * failure:
11222 *==========================================================================*/
11223int QCamera3HardwareInterface::setFrameParameters(
11224 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011225 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011226 int blob_request,
11227 uint32_t snapshotStreamId)
11228{
11229 /*translate from camera_metadata_t type to parm_type_t*/
11230 int rc = 0;
11231 int32_t hal_version = CAM_HAL_V3;
11232
11233 clear_metadata_buffer(mParameters);
11234 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11235 LOGE("Failed to set hal version in the parameters");
11236 return BAD_VALUE;
11237 }
11238
11239 /*we need to update the frame number in the parameters*/
11240 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11241 request->frame_number)) {
11242 LOGE("Failed to set the frame number in the parameters");
11243 return BAD_VALUE;
11244 }
11245
11246 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011247 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011248 LOGE("Failed to set stream type mask in the parameters");
11249 return BAD_VALUE;
11250 }
11251
11252 if (mUpdateDebugLevel) {
11253 uint32_t dummyDebugLevel = 0;
11254 /* The value of dummyDebugLevel is irrelevant; on receiving
11255 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the backend re-reads the debug property */
11256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11257 dummyDebugLevel)) {
11258 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11259 return BAD_VALUE;
11260 }
11261 mUpdateDebugLevel = false;
11262 }
11263
11264 if(request->settings != NULL){
11265 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11266 if (blob_request)
11267 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11268 }
11269
11270 return rc;
11271}
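
// Usage sketch (illustrative; stream IDs are made up and the cam_stream_ID_t
// fields follow their use elsewhere in this file): setFrameParameters() is
// called once per capture request before mParameters is sent to the backend:
//
//   cam_stream_ID_t streams;
//   memset(&streams, 0, sizeof(streams));
//   streams.num_streams = 1;
//   streams.stream_request[0].streamID = 7;   // hypothetical stream ID
//   rc = setFrameParameters(request, streams, blob_request, snapshotStreamId);
//
// On success, mParameters carries CAM_INTF_PARM_HAL_VERSION,
// CAM_INTF_META_FRAME_NUMBER and CAM_INTF_META_STREAM_ID for this frame.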
11272
11273/*===========================================================================
11274 * FUNCTION : setReprocParameters
11275 *
11276 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11277 * return it.
11278 *
11279 * PARAMETERS :
11280 * @request : request that needs to be serviced
11281 *
11282 * RETURN : success: NO_ERROR
11283 * failure:
11284 *==========================================================================*/
11285int32_t QCamera3HardwareInterface::setReprocParameters(
11286 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11287 uint32_t snapshotStreamId)
11288{
11289 /*translate from camera_metadata_t type to parm_type_t*/
11290 int rc = 0;
11291
11292 if (NULL == request->settings){
11293 LOGE("Reprocess settings cannot be NULL");
11294 return BAD_VALUE;
11295 }
11296
11297 if (NULL == reprocParam) {
11298 LOGE("Invalid reprocessing metadata buffer");
11299 return BAD_VALUE;
11300 }
11301 clear_metadata_buffer(reprocParam);
11302
11303 /*we need to update the frame number in the parameters*/
11304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11305 request->frame_number)) {
11306 LOGE("Failed to set the frame number in the parameters");
11307 return BAD_VALUE;
11308 }
11309
11310 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11311 if (rc < 0) {
11312 LOGE("Failed to translate reproc request");
11313 return rc;
11314 }
11315
11316 CameraMetadata frame_settings;
11317 frame_settings = request->settings;
11318 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11319 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11320 int32_t *crop_count =
11321 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11322 int32_t *crop_data =
11323 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11324 int32_t *roi_map =
11325 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11326 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11327 cam_crop_data_t crop_meta;
11328 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11329 crop_meta.num_of_streams = 1;
11330 crop_meta.crop_info[0].crop.left = crop_data[0];
11331 crop_meta.crop_info[0].crop.top = crop_data[1];
11332 crop_meta.crop_info[0].crop.width = crop_data[2];
11333 crop_meta.crop_info[0].crop.height = crop_data[3];
11334
11335 crop_meta.crop_info[0].roi_map.left =
11336 roi_map[0];
11337 crop_meta.crop_info[0].roi_map.top =
11338 roi_map[1];
11339 crop_meta.crop_info[0].roi_map.width =
11340 roi_map[2];
11341 crop_meta.crop_info[0].roi_map.height =
11342 roi_map[3];
11343
11344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11345 rc = BAD_VALUE;
11346 }
11347 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11348 request->input_buffer->stream,
11349 crop_meta.crop_info[0].crop.left,
11350 crop_meta.crop_info[0].crop.top,
11351 crop_meta.crop_info[0].crop.width,
11352 crop_meta.crop_info[0].crop.height);
11353 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11354 request->input_buffer->stream,
11355 crop_meta.crop_info[0].roi_map.left,
11356 crop_meta.crop_info[0].roi_map.top,
11357 crop_meta.crop_info[0].roi_map.width,
11358 crop_meta.crop_info[0].roi_map.height);
11359 } else {
11360 LOGE("Invalid reprocess crop count %d!", *crop_count);
11361 }
11362 } else {
11363 LOGE("No crop data from matching output stream");
11364 }
11365
11366 /* These settings are not needed for regular requests, so handle them specially for
11367 reprocess requests; the information is needed for EXIF tags */
11368 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11369 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11370 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11371 if (NAME_NOT_FOUND != val) {
11372 uint32_t flashMode = (uint32_t)val;
11373 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11374 rc = BAD_VALUE;
11375 }
11376 } else {
11377 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11378 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11379 }
11380 } else {
11381 LOGH("No flash mode in reprocess settings");
11382 }
11383
11384 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11385 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11386 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11387 rc = BAD_VALUE;
11388 }
11389 } else {
11390 LOGH("No flash state in reprocess settings");
11391 }
11392
11393 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11394 uint8_t *reprocessFlags =
11395 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11396 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11397 *reprocessFlags)) {
11398 rc = BAD_VALUE;
11399 }
11400 }
11401
Thierry Strudel54dc9782017-02-15 12:12:10 -080011402 // Add exif debug data to internal metadata
11403 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11404 mm_jpeg_debug_exif_params_t *debug_params =
11405 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11406 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11407 // AE
11408 if (debug_params->ae_debug_params_valid == TRUE) {
11409 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11410 debug_params->ae_debug_params);
11411 }
11412 // AWB
11413 if (debug_params->awb_debug_params_valid == TRUE) {
11414 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11415 debug_params->awb_debug_params);
11416 }
11417 // AF
11418 if (debug_params->af_debug_params_valid == TRUE) {
11419 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11420 debug_params->af_debug_params);
11421 }
11422 // ASD
11423 if (debug_params->asd_debug_params_valid == TRUE) {
11424 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11425 debug_params->asd_debug_params);
11426 }
11427 // Stats
11428 if (debug_params->stats_debug_params_valid == TRUE) {
11429 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11430 debug_params->stats_debug_params);
11431 }
11432 // BE Stats
11433 if (debug_params->bestats_debug_params_valid == TRUE) {
11434 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11435 debug_params->bestats_debug_params);
11436 }
11437 // BHIST
11438 if (debug_params->bhist_debug_params_valid == TRUE) {
11439 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11440 debug_params->bhist_debug_params);
11441 }
11442 // 3A Tuning
11443 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11444 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11445 debug_params->q3a_tuning_debug_params);
11446 }
11447 }
11448
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011449 // Add metadata which reprocess needs
11450 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11451 cam_reprocess_info_t *repro_info =
11452 (cam_reprocess_info_t *)frame_settings.find
11453 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011454 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011455 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011456 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011457 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011458 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011459 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011460 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011461 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011462 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011463 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011464 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011465 repro_info->pipeline_flip);
11466 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11467 repro_info->af_roi);
11468 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11469 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011470 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11471 CAM_INTF_PARM_ROTATION metadata has already been added in
11472 translateToHalMetadata. HAL needs to keep this new rotation
11473 metadata; otherwise, the old rotation info saved in the vendor tag
11474 would be used */
11475 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11476 CAM_INTF_PARM_ROTATION, reprocParam) {
11477 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11478 } else {
11479 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011480 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011481 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011482 }
11483
11484 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11485 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11486 roi.width and roi.height would be the final JPEG size.
11487 For now, HAL only checks this for reprocess requests */
11488 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11489 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11490 uint8_t *enable =
11491 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11492 if (*enable == TRUE) {
11493 int32_t *crop_data =
11494 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11495 cam_stream_crop_info_t crop_meta;
11496 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11497 crop_meta.stream_id = 0;
11498 crop_meta.crop.left = crop_data[0];
11499 crop_meta.crop.top = crop_data[1];
11500 crop_meta.crop.width = crop_data[2];
11501 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011502 // The JPEG crop roi should match cpp output size
11503 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11504 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11505 crop_meta.roi_map.left = 0;
11506 crop_meta.roi_map.top = 0;
11507 crop_meta.roi_map.width = cpp_crop->crop.width;
11508 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011509 }
11510 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11511 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011512 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011513 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011514 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11515 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011516 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011517 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11518
11519 // Add JPEG scale information
11520 cam_dimension_t scale_dim;
11521 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11522 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11523 int32_t *roi =
11524 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11525 scale_dim.width = roi[2];
11526 scale_dim.height = roi[3];
11527 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11528 scale_dim);
11529 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11530 scale_dim.width, scale_dim.height, mCameraId);
11531 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011532 }
11533 }
11534
11535 return rc;
11536}
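
// Illustrative app-side sketch (vendor tag usage, values made up): a reprocess
// request can ask for HW JPEG crop plus scaling by populating the vendor tags
// that setReprocParameters() parses above:
//
//   uint8_t enable = 1;
//   int32_t cropRect[4] = {0, 0, 3000, 2250};   // left, top, width, height
//   int32_t cropRoi[4]  = {0, 0, 1920, 1440};   // [2],[3] = final JPEG size
//   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
//   settings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
//   settings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
//
// The crop ROI map is tied to the CPP output size, and the ROI width/height
// become the JPEG scale dimensions.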
11537
11538/*===========================================================================
11539 * FUNCTION : saveRequestSettings
11540 *
11541 * DESCRIPTION: Add any settings that might have changed to the request settings
11542 * and save the settings to be applied on the frame
11543 *
11544 * PARAMETERS :
11545 * @jpegMetadata : the extracted and/or modified jpeg metadata
11546 * @request : request with initial settings
11547 *
11548 * RETURN :
11549 * camera_metadata_t* : pointer to the saved request settings
11550 *==========================================================================*/
11551camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11552 const CameraMetadata &jpegMetadata,
11553 camera3_capture_request_t *request)
11554{
11555 camera_metadata_t *resultMetadata;
11556 CameraMetadata camMetadata;
11557 camMetadata = request->settings;
11558
11559 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11560 int32_t thumbnail_size[2];
11561 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11562 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11563 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11564 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11565 }
11566
11567 if (request->input_buffer != NULL) {
11568 uint8_t reprocessFlags = 1;
11569 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
11570 (uint8_t*)&reprocessFlags,
11571 sizeof(reprocessFlags));
11572 }
11573
11574 resultMetadata = camMetadata.release();
11575 return resultMetadata;
11576}
11577
11578/*===========================================================================
11579 * FUNCTION : setHalFpsRange
11580 *
11581 * DESCRIPTION: set FPS range parameter
11582 *
11583 *
11584 * PARAMETERS :
11585 * @settings : Metadata from framework
11586 * @hal_metadata: Metadata buffer
11587 *
11588 *
11589 * RETURN : success: NO_ERROR
11590 * failure:
11591 *==========================================================================*/
11592int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
11593 metadata_buffer_t *hal_metadata)
11594{
11595 int32_t rc = NO_ERROR;
11596 cam_fps_range_t fps_range;
11597 fps_range.min_fps = (float)
11598 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
11599 fps_range.max_fps = (float)
11600 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
11601 fps_range.video_min_fps = fps_range.min_fps;
11602 fps_range.video_max_fps = fps_range.max_fps;
11603
11604 LOGD("aeTargetFpsRange fps: [%f %f]",
11605 fps_range.min_fps, fps_range.max_fps);
11606 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
11607 * follows:
11608 * ---------------------------------------------------------------|
11609 * Video stream is absent in configure_streams |
11610 * (Camcorder preview before the first video record |
11611 * ---------------------------------------------------------------|
11612 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11613 * | | | vid_min/max_fps|
11614 * ---------------------------------------------------------------|
11615 * NO | [ 30, 240] | 240 | [240, 240] |
11616 * |-------------|-------------|----------------|
11617 * | [240, 240] | 240 | [240, 240] |
11618 * ---------------------------------------------------------------|
11619 * Video stream is present in configure_streams |
11620 * ---------------------------------------------------------------|
11621 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
11622 * | | | vid_min/max_fps|
11623 * ---------------------------------------------------------------|
11624 * NO | [ 30, 240] | 240 | [240, 240] |
11625 * (camcorder prev |-------------|-------------|----------------|
11626 * after video rec | [240, 240] | 240 | [240, 240] |
11627 * is stopped) | | | |
11628 * ---------------------------------------------------------------|
11629 * YES | [ 30, 240] | 240 | [240, 240] |
11630 * |-------------|-------------|----------------|
11631 * | [240, 240] | 240 | [240, 240] |
11632 * ---------------------------------------------------------------|
11633 * When Video stream is absent in configure_streams,
11634 * preview fps = sensor_fps / batchsize
11635 * Eg: for 240fps at batchSize 4, preview = 60fps
11636 * for 120fps at batchSize 4, preview = 30fps
11637 *
11638 * When video stream is present in configure_streams, preview fps is as per
11639 * the ratio of preview buffers to video buffers requested in process
11640 * capture request
11641 */
11642 mBatchSize = 0;
11643 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
11644 fps_range.min_fps = fps_range.video_max_fps;
11645 fps_range.video_min_fps = fps_range.video_max_fps;
11646 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
11647 fps_range.max_fps);
11648 if (NAME_NOT_FOUND != val) {
11649 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
11650 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11651 return BAD_VALUE;
11652 }
11653
11654 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
11655 /* If batchmode is currently in progress and the fps changes,
11656 * set the flag to restart the sensor */
11657 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
11658 (mHFRVideoFps != fps_range.max_fps)) {
11659 mNeedSensorRestart = true;
11660 }
11661 mHFRVideoFps = fps_range.max_fps;
11662 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
11663 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
11664 mBatchSize = MAX_HFR_BATCH_SIZE;
11665 }
11666 }
11667 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
11668
11669 }
11670 } else {
11671 /* HFR mode is a session parameter in the backend/ISP. It must be reset
11672 * when not in HFR mode */
11673 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
11674 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
11675 return BAD_VALUE;
11676 }
11677 }
11678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
11679 return BAD_VALUE;
11680 }
11681 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
11682 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
11683 return rc;
11684}
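
// Worked example (illustrative, assuming PREVIEW_FPS_FOR_HFR == 30): in
// CONSTRAINED_HIGH_SPEED_MODE with aeTargetFpsRange = [30, 240], the code
// above pins min_fps and video_min_fps to 240, looks up the matching HFR
// mode, and derives
//   mBatchSize = 240 / 30 = 8   (then capped at MAX_HFR_BATCH_SIZE)
// so the sensor streams at [240, 240] while the preview effectively runs at
// sensor_fps / batchSize, as described in the table above. If the HFR fps
// changes while batching is active, mNeedSensorRestart forces a sensor
// restart so the new batch size can take effect.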
11685
11686/*===========================================================================
11687 * FUNCTION : translateToHalMetadata
11688 *
11689 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
11690 *
11691 *
11692 * PARAMETERS :
11693 * @request : request sent from framework
11694 *
11695 *
11696 * RETURN : success: NO_ERROR
11697 * failure:
11698 *==========================================================================*/
11699int QCamera3HardwareInterface::translateToHalMetadata
11700 (const camera3_capture_request_t *request,
11701 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011702 uint32_t snapshotStreamId) {
11703 if (request == nullptr || hal_metadata == nullptr) {
11704 return BAD_VALUE;
11705 }
11706
11707 int64_t minFrameDuration = getMinFrameDuration(request);
11708
11709 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
11710 minFrameDuration);
11711}
11712
11713int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
11714 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
11715 uint32_t snapshotStreamId, int64_t minFrameDuration) {
11716
Thierry Strudel3d639192016-09-09 11:52:26 -070011717 int rc = 0;
11718 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080011719 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070011720
11721 /* Do not change the order of the following list unless you know what you are
11722 * doing.
11723 * The order is laid out in such a way that parameters in the front of the table
11724 * may be used to override the parameters later in the table. Examples are:
11725 * 1. META_MODE should precede AEC/AWB/AF MODE
11726 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
11727 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
11728 * 4. Any mode should precede its corresponding settings
11729 */
11730 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
11731 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
11732 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
11733 rc = BAD_VALUE;
11734 }
11735 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
11736 if (rc != NO_ERROR) {
11737 LOGE("extractSceneMode failed");
11738 }
11739 }
11740
11741 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
11742 uint8_t fwk_aeMode =
11743 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
11744 uint8_t aeMode;
11745 int32_t redeye;
11746
11747 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
11748 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080011749 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
11750 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070011751 } else {
11752 aeMode = CAM_AE_MODE_ON;
11753 }
11754 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
11755 redeye = 1;
11756 } else {
11757 redeye = 0;
11758 }
11759
11760 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
11761 fwk_aeMode);
11762 if (NAME_NOT_FOUND != val) {
11763 int32_t flashMode = (int32_t)val;
11764 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
11765 }
11766
11767 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
11768 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
11769 rc = BAD_VALUE;
11770 }
11771 }
11772
11773 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
11774 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
11775 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
11776 fwk_whiteLevel);
11777 if (NAME_NOT_FOUND != val) {
11778 uint8_t whiteLevel = (uint8_t)val;
11779 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
11780 rc = BAD_VALUE;
11781 }
11782 }
11783 }
11784
11785 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
11786 uint8_t fwk_cacMode =
11787 frame_settings.find(
11788 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
11789 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
11790 fwk_cacMode);
11791 if (NAME_NOT_FOUND != val) {
11792 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
11793 bool entryAvailable = FALSE;
11794 // Check whether the framework-requested CAC mode is supported by the device
11795 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11796 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
11797 entryAvailable = TRUE;
11798 break;
11799 }
11800 }
11801 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
11802 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
11803 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
11804 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
11805 if (entryAvailable == FALSE) {
11806 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11807 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11808 } else {
11809 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11810 // HIGH is not supported, so set FAST; the spec says the underlying
11811 // device implementation can be the same for both modes.
11812 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
11813 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11814 // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
11815 // in order to avoid the fps drop caused by high-quality processing.
11816 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11817 } else {
11818 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
11819 }
11820 }
11821 }
11822 LOGD("Final cacMode is %d", cacMode);
11823 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
11824 rc = BAD_VALUE;
11825 }
11826 } else {
11827 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
11828 }
11829 }
11830
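    // Debug override: when persist.camera.af.infinity is set to a non-zero value, the
    // framework-requested AF mode is ignored and focus is forced to infinity.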
Thierry Strudel2896d122017-02-23 19:18:03 -080011831 char af_value[PROPERTY_VALUE_MAX];
11832 property_get("persist.camera.af.infinity", af_value, "0");
11833
Jason Lee84ae9972017-02-24 13:24:24 -080011834 uint8_t fwk_focusMode = 0;
Thierry Strudel2896d122017-02-23 19:18:03 -080011835 if (atoi(af_value) == 0) {
11836 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080011837 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080011838 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
11839 fwk_focusMode);
11840 if (NAME_NOT_FOUND != val) {
11841 uint8_t focusMode = (uint8_t)val;
11842 LOGD("set focus mode %d", focusMode);
11843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11844 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11845 rc = BAD_VALUE;
11846 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011847 }
11848 }
Thierry Strudel2896d122017-02-23 19:18:03 -080011849 } else {
11850 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
11851 LOGE("Focus forced to infinity %d", focusMode);
11852 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
11853 rc = BAD_VALUE;
11854 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011855 }
11856
Jason Lee84ae9972017-02-24 13:24:24 -080011857 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
11858 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011859 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
11860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
11861 focalDistance)) {
11862 rc = BAD_VALUE;
11863 }
11864 }
11865
11866 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
11867 uint8_t fwk_antibandingMode =
11868 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
11869 int val = lookupHalName(ANTIBANDING_MODES_MAP,
11870 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
11871 if (NAME_NOT_FOUND != val) {
11872 uint32_t hal_antibandingMode = (uint32_t)val;
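            // For AUTO antibanding, select the 50Hz or 60Hz variant based on the 60Hz-zone flag.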
Shuzhen Wangf6890e02016-08-12 14:28:54 -070011873 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
11874 if (m60HzZone) {
11875 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
11876 } else {
11877 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
11878 }
11879 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011880 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
11881 hal_antibandingMode)) {
11882 rc = BAD_VALUE;
11883 }
11884 }
11885 }
11886
11887 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
11888 int32_t expCompensation = frame_settings.find(
11889 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
11890 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
11891 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
11892 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
11893 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080011894 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070011895 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
11896 expCompensation)) {
11897 rc = BAD_VALUE;
11898 }
11899 }
11900
11901 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
11902 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
11903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
11904 rc = BAD_VALUE;
11905 }
11906 }
11907 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
11908 rc = setHalFpsRange(frame_settings, hal_metadata);
11909 if (rc != NO_ERROR) {
11910 LOGE("setHalFpsRange failed");
11911 }
11912 }
11913
11914 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
11915 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
11916 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
11917 rc = BAD_VALUE;
11918 }
11919 }
11920
11921 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
11922 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
11923 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
11924 fwk_effectMode);
11925 if (NAME_NOT_FOUND != val) {
11926 uint8_t effectMode = (uint8_t)val;
11927 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
11928 rc = BAD_VALUE;
11929 }
11930 }
11931 }
11932
11933 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
11934 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
11935 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
11936 colorCorrectMode)) {
11937 rc = BAD_VALUE;
11938 }
11939 }
11940
11941 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
11942 cam_color_correct_gains_t colorCorrectGains;
11943 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
11944 colorCorrectGains.gains[i] =
11945 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
11946 }
11947 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
11948 colorCorrectGains)) {
11949 rc = BAD_VALUE;
11950 }
11951 }
11952
11953 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
11954 cam_color_correct_matrix_t colorCorrectTransform;
11955 cam_rational_type_t transform_elem;
11956 size_t num = 0;
11957 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
11958 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
11959 transform_elem.numerator =
11960 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
11961 transform_elem.denominator =
11962 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
11963 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
11964 num++;
11965 }
11966 }
11967 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
11968 colorCorrectTransform)) {
11969 rc = BAD_VALUE;
11970 }
11971 }
11972
11973 cam_trigger_t aecTrigger;
11974 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
11975 aecTrigger.trigger_id = -1;
11976 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
11977 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
11978 aecTrigger.trigger =
11979 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
11980 aecTrigger.trigger_id =
11981 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
11982 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
11983 aecTrigger)) {
11984 rc = BAD_VALUE;
11985 }
11986 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
11987 aecTrigger.trigger, aecTrigger.trigger_id);
11988 }
11989
11990 /*af_trigger must come with a trigger id*/
11991 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
11992 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
11993 cam_trigger_t af_trigger;
11994 af_trigger.trigger =
11995 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
11996 af_trigger.trigger_id =
11997 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
11998 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
11999 rc = BAD_VALUE;
12000 }
12001 LOGD("AfTrigger: %d AfTriggerID: %d",
12002 af_trigger.trigger, af_trigger.trigger_id);
12003 }
12004
12005 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12006 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12007 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12008 rc = BAD_VALUE;
12009 }
12010 }
12011 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12012 cam_edge_application_t edge_application;
12013 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012014
Thierry Strudel3d639192016-09-09 11:52:26 -070012015 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12016 edge_application.sharpness = 0;
12017 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012018 edge_application.sharpness =
12019 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12020 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12021 int32_t sharpness =
12022 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12023 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12024 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12025 LOGD("Setting edge mode sharpness %d", sharpness);
12026 edge_application.sharpness = sharpness;
12027 }
12028 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012029 }
12030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12031 rc = BAD_VALUE;
12032 }
12033 }
12034
12035 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12036 int32_t respectFlashMode = 1;
12037 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12038 uint8_t fwk_aeMode =
12039 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012040 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12041 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12042 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012043 respectFlashMode = 0;
12044 LOGH("AE Mode controls flash, ignore android.flash.mode");
12045 }
12046 }
12047 if (respectFlashMode) {
12048 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12049 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12050 LOGH("flash mode after mapping %d", val);
12051 // To check: CAM_INTF_META_FLASH_MODE usage
12052 if (NAME_NOT_FOUND != val) {
12053 uint8_t flashMode = (uint8_t)val;
12054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12055 rc = BAD_VALUE;
12056 }
12057 }
12058 }
12059 }
12060
12061 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12062 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12063 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12064 rc = BAD_VALUE;
12065 }
12066 }
12067
12068 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12069 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12070 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12071 flashFiringTime)) {
12072 rc = BAD_VALUE;
12073 }
12074 }
12075
12076 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12077 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12079 hotPixelMode)) {
12080 rc = BAD_VALUE;
12081 }
12082 }
12083
12084 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12085 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12086 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12087 lensAperture)) {
12088 rc = BAD_VALUE;
12089 }
12090 }
12091
12092 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12093 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12095 filterDensity)) {
12096 rc = BAD_VALUE;
12097 }
12098 }
12099
12100 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12101 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12103 focalLength)) {
12104 rc = BAD_VALUE;
12105 }
12106 }
12107
12108 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12109 uint8_t optStabMode =
12110 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12111 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12112 optStabMode)) {
12113 rc = BAD_VALUE;
12114 }
12115 }
12116
12117 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12118 uint8_t videoStabMode =
12119 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12120 LOGD("videoStabMode from APP = %d", videoStabMode);
12121 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12122 videoStabMode)) {
12123 rc = BAD_VALUE;
12124 }
12125 }
12126
12127
12128 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12129 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12131 noiseRedMode)) {
12132 rc = BAD_VALUE;
12133 }
12134 }
12135
12136 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12137 float reprocessEffectiveExposureFactor =
12138 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12139 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12140 reprocessEffectiveExposureFactor)) {
12141 rc = BAD_VALUE;
12142 }
12143 }
12144
12145 cam_crop_region_t scalerCropRegion;
12146 bool scalerCropSet = false;
12147 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12148 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12149 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12150 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12151 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12152
12153 // Map coordinate system from active array to sensor output.
12154 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12155 scalerCropRegion.width, scalerCropRegion.height);
12156
12157 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12158 scalerCropRegion)) {
12159 rc = BAD_VALUE;
12160 }
12161 scalerCropSet = true;
12162 }
12163
12164 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12165 int64_t sensorExpTime =
12166 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12167 LOGD("setting sensorExpTime %lld", sensorExpTime);
12168 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12169 sensorExpTime)) {
12170 rc = BAD_VALUE;
12171 }
12172 }
12173
12174 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12175 int64_t sensorFrameDuration =
12176 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012177 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12178 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12179 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12180 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12181 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12182 sensorFrameDuration)) {
12183 rc = BAD_VALUE;
12184 }
12185 }
12186
12187 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12188 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12189 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12190 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12191 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12192 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12193 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12194 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12195 sensorSensitivity)) {
12196 rc = BAD_VALUE;
12197 }
12198 }
12199
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012200#ifndef USE_HAL_3_3
12201 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12202 int32_t ispSensitivity =
12203 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12204 if (ispSensitivity <
12205 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12206 ispSensitivity =
12207 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12208 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12209 }
12210 if (ispSensitivity >
12211 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12212 ispSensitivity =
12213 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12214 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12215 }
12216 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12217 ispSensitivity)) {
12218 rc = BAD_VALUE;
12219 }
12220 }
12221#endif
12222
Thierry Strudel3d639192016-09-09 11:52:26 -070012223 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12224 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12225 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12226 rc = BAD_VALUE;
12227 }
12228 }
12229
12230 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12231 uint8_t fwk_facedetectMode =
12232 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12233
12234 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12235 fwk_facedetectMode);
12236
12237 if (NAME_NOT_FOUND != val) {
12238 uint8_t facedetectMode = (uint8_t)val;
12239 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12240 facedetectMode)) {
12241 rc = BAD_VALUE;
12242 }
12243 }
12244 }
12245
Thierry Strudel54dc9782017-02-15 12:12:10 -080012246 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012247 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012248 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012249 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12250 histogramMode)) {
12251 rc = BAD_VALUE;
12252 }
12253 }
12254
12255 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12256 uint8_t sharpnessMapMode =
12257 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12258 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12259 sharpnessMapMode)) {
12260 rc = BAD_VALUE;
12261 }
12262 }
12263
12264 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12265 uint8_t tonemapMode =
12266 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12267 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12268 rc = BAD_VALUE;
12269 }
12270 }
12271 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12272 /*All tonemap channels will have the same number of points*/
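    /* Each curve is supplied as interleaved (Pin, Pout) pairs, hence count/2 points per channel */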
12273 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12274 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12275 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12276 cam_rgb_tonemap_curves tonemapCurves;
12277 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12278 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12279 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12280 tonemapCurves.tonemap_points_cnt,
12281 CAM_MAX_TONEMAP_CURVE_SIZE);
12282 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12283 }
12284
12285 /* ch0 = G*/
12286 size_t point = 0;
12287 cam_tonemap_curve_t tonemapCurveGreen;
12288 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12289 for (size_t j = 0; j < 2; j++) {
12290 tonemapCurveGreen.tonemap_points[i][j] =
12291 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12292 point++;
12293 }
12294 }
12295 tonemapCurves.curves[0] = tonemapCurveGreen;
12296
12297 /* ch 1 = B */
12298 point = 0;
12299 cam_tonemap_curve_t tonemapCurveBlue;
12300 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12301 for (size_t j = 0; j < 2; j++) {
12302 tonemapCurveBlue.tonemap_points[i][j] =
12303 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12304 point++;
12305 }
12306 }
12307 tonemapCurves.curves[1] = tonemapCurveBlue;
12308
12309 /* ch 2 = R */
12310 point = 0;
12311 cam_tonemap_curve_t tonemapCurveRed;
12312 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12313 for (size_t j = 0; j < 2; j++) {
12314 tonemapCurveRed.tonemap_points[i][j] =
12315 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12316 point++;
12317 }
12318 }
12319 tonemapCurves.curves[2] = tonemapCurveRed;
12320
12321 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12322 tonemapCurves)) {
12323 rc = BAD_VALUE;
12324 }
12325 }
12326
12327 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12328 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12330 captureIntent)) {
12331 rc = BAD_VALUE;
12332 }
12333 }
12334
12335 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12336 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12338 blackLevelLock)) {
12339 rc = BAD_VALUE;
12340 }
12341 }
12342
12343 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12344 uint8_t lensShadingMapMode =
12345 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12347 lensShadingMapMode)) {
12348 rc = BAD_VALUE;
12349 }
12350 }
12351
12352 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12353 cam_area_t roi;
12354 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012355 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012356
12357 // Map coordinate system from active array to sensor output.
12358 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12359 roi.rect.height);
12360
12361 if (scalerCropSet) {
12362 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12363 }
12364 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12370 cam_area_t roi;
12371 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012372 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012373
12374 // Map coordinate system from active array to sensor output.
12375 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12376 roi.rect.height);
12377
12378 if (scalerCropSet) {
12379 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12380 }
12381 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12382 rc = BAD_VALUE;
12383 }
12384 }
12385
12386 // CDS for non-HFR non-video mode
12387 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12388 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12389 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12390 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12391 LOGE("Invalid CDS mode %d!", *fwk_cds);
12392 } else {
12393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12394 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12395 rc = BAD_VALUE;
12396 }
12397 }
12398 }
12399
Thierry Strudel04e026f2016-10-10 11:27:36 -070012400 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012401 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012402 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012403 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12404 }
12405 if (m_bVideoHdrEnabled)
12406 vhdr = CAM_VIDEO_HDR_MODE_ON;
12407
Thierry Strudel54dc9782017-02-15 12:12:10 -080012408 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12409
12410 if(vhdr != curr_hdr_state)
12411 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12412
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012413 rc = setVideoHdrMode(mParameters, vhdr);
12414 if (rc != NO_ERROR) {
12415 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012416 }
12417
12418 //IR
12419 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12420 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12421 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012422 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12423 uint8_t isIRon = 0;
12424
12425 isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012426 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12427 LOGE("Invalid IR mode %d!", fwk_ir);
12428 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012429 if(isIRon != curr_ir_state )
12430 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12431
Thierry Strudel04e026f2016-10-10 11:27:36 -070012432 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12433 CAM_INTF_META_IR_MODE, fwk_ir)) {
12434 rc = BAD_VALUE;
12435 }
12436 }
12437 }
12438
Thierry Strudel54dc9782017-02-15 12:12:10 -080012439 //Binning Correction Mode
12440 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12441 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12442 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12443 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12444 || (0 > fwk_binning_correction)) {
12445 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12446 } else {
12447 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12448 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12449 rc = BAD_VALUE;
12450 }
12451 }
12452 }
12453
Thierry Strudel269c81a2016-10-12 12:13:59 -070012454 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12455 float aec_speed;
12456 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12457 LOGD("AEC Speed :%f", aec_speed);
12458 if ( aec_speed < 0 ) {
12459 LOGE("Invalid AEC mode %f!", aec_speed);
12460 } else {
12461 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12462 aec_speed)) {
12463 rc = BAD_VALUE;
12464 }
12465 }
12466 }
12467
12468 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12469 float awb_speed;
12470 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12471 LOGD("AWB Speed :%f", awb_speed);
12472 if ( awb_speed < 0 ) {
12473 LOGE("Invalid AWB mode %f!", awb_speed);
12474 } else {
12475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12476 awb_speed)) {
12477 rc = BAD_VALUE;
12478 }
12479 }
12480 }
12481
Thierry Strudel3d639192016-09-09 11:52:26 -070012482 // TNR
12483 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12484 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12485 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012486 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012487 cam_denoise_param_t tnr;
12488 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12489 tnr.process_plates =
12490 (cam_denoise_process_type_t)frame_settings.find(
12491 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12492 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012493
12494 if(b_TnrRequested != curr_tnr_state)
12495 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
12496
Thierry Strudel3d639192016-09-09 11:52:26 -070012497 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12498 rc = BAD_VALUE;
12499 }
12500 }
12501
Thierry Strudel54dc9782017-02-15 12:12:10 -080012502 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012503 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012504 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012505 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12506 *exposure_metering_mode)) {
12507 rc = BAD_VALUE;
12508 }
12509 }
12510
Thierry Strudel3d639192016-09-09 11:52:26 -070012511 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12512 int32_t fwk_testPatternMode =
12513 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12514 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12515 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12516
12517 if (NAME_NOT_FOUND != testPatternMode) {
12518 cam_test_pattern_data_t testPatternData;
12519 memset(&testPatternData, 0, sizeof(testPatternData));
12520 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12521 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12522 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12523 int32_t *fwk_testPatternData =
12524 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12525 testPatternData.r = fwk_testPatternData[0];
12526 testPatternData.b = fwk_testPatternData[3];
12527 switch (gCamCapability[mCameraId]->color_arrangement) {
12528 case CAM_FILTER_ARRANGEMENT_RGGB:
12529 case CAM_FILTER_ARRANGEMENT_GRBG:
12530 testPatternData.gr = fwk_testPatternData[1];
12531 testPatternData.gb = fwk_testPatternData[2];
12532 break;
12533 case CAM_FILTER_ARRANGEMENT_GBRG:
12534 case CAM_FILTER_ARRANGEMENT_BGGR:
12535 testPatternData.gr = fwk_testPatternData[2];
12536 testPatternData.gb = fwk_testPatternData[1];
12537 break;
12538 default:
12539 LOGE("color arrangement %d is not supported",
12540 gCamCapability[mCameraId]->color_arrangement);
12541 break;
12542 }
12543 }
12544 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12545 testPatternData)) {
12546 rc = BAD_VALUE;
12547 }
12548 } else {
12549 LOGE("Invalid framework sensor test pattern mode %d",
12550 fwk_testPatternMode);
12551 }
12552 }
12553
12554 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12555 size_t count = 0;
12556 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12557 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12558 gps_coords.data.d, gps_coords.count, count);
12559 if (gps_coords.count != count) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12565 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12566 size_t count = 0;
12567 const char *gps_methods_src = (const char *)
12568 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12569 memset(gps_methods, '\0', sizeof(gps_methods));
12570 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12571 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
12572 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
12573 if (GPS_PROCESSING_METHOD_SIZE != count) {
12574 rc = BAD_VALUE;
12575 }
12576 }
12577
12578 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
12579 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
12580 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
12581 gps_timestamp)) {
12582 rc = BAD_VALUE;
12583 }
12584 }
12585
12586 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
12587 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
12588 cam_rotation_info_t rotation_info;
12589 if (orientation == 0) {
12590 rotation_info.rotation = ROTATE_0;
12591 } else if (orientation == 90) {
12592 rotation_info.rotation = ROTATE_90;
12593 } else if (orientation == 180) {
12594 rotation_info.rotation = ROTATE_180;
12595 } else if (orientation == 270) {
12596 rotation_info.rotation = ROTATE_270;
12597 }
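        // android.jpeg.orientation is constrained by the framework to 0, 90, 180 or 270,
        // so one of the branches above always applies.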
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070012598 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070012599 rotation_info.streamId = snapshotStreamId;
12600 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
12601 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
12606 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
12607 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
12609 rc = BAD_VALUE;
12610 }
12611 }
12612
12613 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
12614 uint32_t thumb_quality = (uint32_t)
12615 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
12616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
12617 thumb_quality)) {
12618 rc = BAD_VALUE;
12619 }
12620 }
12621
12622 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12623 cam_dimension_t dim;
12624 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12625 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12626 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
12627 rc = BAD_VALUE;
12628 }
12629 }
12630
12631 // Internal metadata
12632 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
12633 size_t count = 0;
12634 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
12635 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
12636 privatedata.data.i32, privatedata.count, count);
12637 if (privatedata.count != count) {
12638 rc = BAD_VALUE;
12639 }
12640 }
12641
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012642 // ISO/Exposure Priority
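    // When ISO or exposure-time priority is selected, the requested value is applied as a
    // manual 3A parameter and ZSL mode is enabled; otherwise ZSL mode is disabled.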
12643 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
12644 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
12645 cam_priority_mode_t mode =
12646 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
12647 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
12648 cam_intf_parm_manual_3a_t use_iso_exp_pty;
12649 use_iso_exp_pty.previewOnly = FALSE;
12650 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
12651 use_iso_exp_pty.value = *ptr;
12652
12653 if(CAM_ISO_PRIORITY == mode) {
12654 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
12655 use_iso_exp_pty)) {
12656 rc = BAD_VALUE;
12657 }
12658 }
12659 else {
12660 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
12661 use_iso_exp_pty)) {
12662 rc = BAD_VALUE;
12663 }
12664 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080012665
12666 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
12667 rc = BAD_VALUE;
12668 }
12669 }
12670 } else {
12671 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
12672 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012673 }
12674 }
12675
12676 // Saturation
12677 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
12678 int32_t* use_saturation =
12679 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
12680 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
12681 rc = BAD_VALUE;
12682 }
12683 }
12684
Thierry Strudel3d639192016-09-09 11:52:26 -070012685 // EV step
12686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
12687 gCamCapability[mCameraId]->exp_compensation_step)) {
12688 rc = BAD_VALUE;
12689 }
12690
12691 // CDS info
12692 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
12693 cam_cds_data_t *cdsData = (cam_cds_data_t *)
12694 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
12695
12696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12697 CAM_INTF_META_CDS_DATA, *cdsData)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
Shuzhen Wang19463d72016-03-08 11:09:52 -080012702 // Hybrid AE
12703 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
12704 uint8_t *hybrid_ae = (uint8_t *)
12705 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
12706
12707 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12708 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
12709 rc = BAD_VALUE;
12710 }
12711 }
12712
Shuzhen Wang14415f52016-11-16 18:26:18 -080012713 // Histogram
12714 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
12715 uint8_t histogramMode =
12716 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
12717 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12718 histogramMode)) {
12719 rc = BAD_VALUE;
12720 }
12721 }
12722
12723 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
12724 int32_t histogramBins =
12725 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
12726 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
12727 histogramBins)) {
12728 rc = BAD_VALUE;
12729 }
12730 }
12731
Shuzhen Wangcc386c52017-03-29 09:28:08 -070012732 // Tracking AF
12733 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
12734 uint8_t trackingAfTrigger =
12735 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
12736 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
12737 trackingAfTrigger)) {
12738 rc = BAD_VALUE;
12739 }
12740 }
12741
Thierry Strudel3d639192016-09-09 11:52:26 -070012742 return rc;
12743}
12744
12745/*===========================================================================
12746 * FUNCTION : captureResultCb
12747 *
12748 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
12749 *
12750 * PARAMETERS :
12751 * @metadata : metadata super buffer from mm-camera-interface
12752 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
12753 * @userdata: userdata
12754 *
12755 * RETURN : NONE
12756 *==========================================================================*/
12757void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
12758 camera3_stream_buffer_t *buffer,
12759 uint32_t frame_number, bool isInputBuffer, void *userdata)
12760{
12761 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12762 if (hw == NULL) {
12763 LOGE("Invalid hw %p", hw);
12764 return;
12765 }
12766
12767 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
12768 return;
12769}
12770
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012771/*===========================================================================
12772 * FUNCTION : setBufferErrorStatus
12773 *
12774 * DESCRIPTION: Callback handler for channels to report any buffer errors
12775 *
12776 * PARAMETERS :
12777 * @ch : Channel on which buffer error is reported from
12778 * @frame_number : frame number for which the buffer error is reported
12779 * @buffer_status : buffer error status
12780 * @userdata: userdata
12781 *
12782 * RETURN : NONE
12783 *==========================================================================*/
12784void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12785 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
12786{
12787 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
12788 if (hw == NULL) {
12789 LOGE("Invalid hw %p", hw);
12790 return;
12791 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012792
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012793 hw->setBufferErrorStatus(ch, frame_number, err);
12794 return;
12795}
12796
12797void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
12798 uint32_t frameNumber, camera3_buffer_status_t err)
12799{
12800 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
12801 pthread_mutex_lock(&mMutex);
12802
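    // Mark every pending buffer of this request that belongs to the reporting channel as
    // errored, so it can later be returned to the framework with CAMERA3_BUFFER_STATUS_ERROR.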
12803 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
12804 if (req.frame_number != frameNumber)
12805 continue;
12806 for (auto& k : req.mPendingBufferList) {
12807 if(k.stream->priv == ch) {
12808 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
12809 }
12810 }
12811 }
12812
12813 pthread_mutex_unlock(&mMutex);
12814 return;
12815}
Thierry Strudel3d639192016-09-09 11:52:26 -070012816/*===========================================================================
12817 * FUNCTION : initialize
12818 *
12819 * DESCRIPTION: Pass framework callback pointers to HAL
12820 *
12821 * PARAMETERS :
12822 *
12823 *
12824 * RETURN : Success : 0
12825 * Failure: -ENODEV
12826 *==========================================================================*/
12827
12828int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
12829 const camera3_callback_ops_t *callback_ops)
12830{
12831 LOGD("E");
12832 QCamera3HardwareInterface *hw =
12833 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12834 if (!hw) {
12835 LOGE("NULL camera device");
12836 return -ENODEV;
12837 }
12838
12839 int rc = hw->initialize(callback_ops);
12840 LOGD("X");
12841 return rc;
12842}
12843
12844/*===========================================================================
12845 * FUNCTION : configure_streams
12846 *
12847 * DESCRIPTION: Set up the streams requested by the framework
12848 *
12849 * PARAMETERS :
12850 *
12851 *
12852 * RETURN : Success: 0
12853 * Failure: -EINVAL (if stream configuration is invalid)
12854 * -ENODEV (fatal error)
12855 *==========================================================================*/
12856
12857int QCamera3HardwareInterface::configure_streams(
12858 const struct camera3_device *device,
12859 camera3_stream_configuration_t *stream_list)
12860{
12861 LOGD("E");
12862 QCamera3HardwareInterface *hw =
12863 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12864 if (!hw) {
12865 LOGE("NULL camera device");
12866 return -ENODEV;
12867 }
12868 int rc = hw->configureStreams(stream_list);
12869 LOGD("X");
12870 return rc;
12871}
12872
12873/*===========================================================================
12874 * FUNCTION : construct_default_request_settings
12875 *
12876 * DESCRIPTION: Configure a settings buffer to meet the required use case
12877 *
12878 * PARAMETERS :
12879 *
12880 *
12881 * RETURN : Success: Return valid metadata
12882 * Failure: Return NULL
12883 *==========================================================================*/
12884const camera_metadata_t* QCamera3HardwareInterface::
12885 construct_default_request_settings(const struct camera3_device *device,
12886 int type)
12887{
12888
12889 LOGD("E");
12890 camera_metadata_t* fwk_metadata = NULL;
12891 QCamera3HardwareInterface *hw =
12892 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12893 if (!hw) {
12894 LOGE("NULL camera device");
12895 return NULL;
12896 }
12897
12898 fwk_metadata = hw->translateCapabilityToMetadata(type);
12899
12900 LOGD("X");
12901 return fwk_metadata;
12902}
12903
12904/*===========================================================================
12905 * FUNCTION : process_capture_request
12906 *
12907 * DESCRIPTION: Process a capture request submitted by the framework
12908 *
12909 * PARAMETERS :
12910 *
12911 *
12912 * RETURN :
12913 *==========================================================================*/
12914int QCamera3HardwareInterface::process_capture_request(
12915 const struct camera3_device *device,
12916 camera3_capture_request_t *request)
12917{
12918 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012919 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070012920 QCamera3HardwareInterface *hw =
12921 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12922 if (!hw) {
12923 LOGE("NULL camera device");
12924 return -EINVAL;
12925 }
12926
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012927 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070012928 LOGD("X");
12929 return rc;
12930}
12931
12932/*===========================================================================
12933 * FUNCTION : dump
12934 *
12935 * DESCRIPTION:
12936 *
12937 * PARAMETERS :
12938 *
12939 *
12940 * RETURN : None
12941 *==========================================================================*/
12942
12943void QCamera3HardwareInterface::dump(
12944 const struct camera3_device *device, int fd)
12945{
12946 /* Log level property is read when "adb shell dumpsys media.camera" is
12947 called so that the log level can be controlled without restarting
12948 the media server */
12949 getLogLevel();
12950
12951 LOGD("E");
12952 QCamera3HardwareInterface *hw =
12953 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12954 if (!hw) {
12955 LOGE("NULL camera device");
12956 return;
12957 }
12958
12959 hw->dump(fd);
12960 LOGD("X");
12961 return;
12962}
12963
12964/*===========================================================================
12965 * FUNCTION : flush
12966 *
12967 * DESCRIPTION: Flush all in-flight captures and return their buffers
12968 *
12969 * PARAMETERS :
12970 *
12971 *
12972 * RETURN :
12973 *==========================================================================*/
12974
12975int QCamera3HardwareInterface::flush(
12976 const struct camera3_device *device)
12977{
12978 int rc;
12979 LOGD("E");
12980 QCamera3HardwareInterface *hw =
12981 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
12982 if (!hw) {
12983 LOGE("NULL camera device");
12984 return -EINVAL;
12985 }
12986
12987 pthread_mutex_lock(&hw->mMutex);
12988 // Validate current state
12989 switch (hw->mState) {
12990 case STARTED:
12991 /* valid state */
12992 break;
12993
12994 case ERROR:
12995 pthread_mutex_unlock(&hw->mMutex);
12996 hw->handleCameraDeviceError();
12997 return -ENODEV;
12998
12999 default:
13000 LOGI("Flush returned during state %d", hw->mState);
13001 pthread_mutex_unlock(&hw->mMutex);
13002 return 0;
13003 }
13004 pthread_mutex_unlock(&hw->mMutex);
13005
13006 rc = hw->flush(true /* restart channels */ );
13007 LOGD("X");
13008 return rc;
13009}
13010
13011/*===========================================================================
13012 * FUNCTION : close_camera_device
13013 *
13014 * DESCRIPTION: Close the camera device and free the HAL instance
13015 *
13016 * PARAMETERS :
13017 *
13018 *
13019 * RETURN : NO_ERROR on success; BAD_VALUE if device is NULL
13020 *==========================================================================*/
13021int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13022{
13023 int ret = NO_ERROR;
13024 QCamera3HardwareInterface *hw =
13025 reinterpret_cast<QCamera3HardwareInterface *>(
13026 reinterpret_cast<camera3_device_t *>(device)->priv);
13027 if (!hw) {
13028 LOGE("NULL camera device");
13029 return BAD_VALUE;
13030 }
13031
13032 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13033 delete hw;
13034 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013035 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013036 return ret;
13037}
13038
13039/*===========================================================================
13040 * FUNCTION : getWaveletDenoiseProcessPlate
13041 *
13042 * DESCRIPTION: query wavelet denoise process plate
13043 *
13044 * PARAMETERS : None
13045 *
13046 * RETURN : WNR process plate value
13047 *==========================================================================*/
13048cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13049{
13050 char prop[PROPERTY_VALUE_MAX];
13051 memset(prop, 0, sizeof(prop));
13052 property_get("persist.denoise.process.plates", prop, "0");
13053 int processPlate = atoi(prop);
13054 switch(processPlate) {
13055 case 0:
13056 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13057 case 1:
13058 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13059 case 2:
13060 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13061 case 3:
13062 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13063 default:
13064 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13065 }
13066}
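// Usage note (illustrative): the plate selection can be overridden at runtime through the
// property read above, e.g. "adb shell setprop persist.denoise.process.plates 2" selects
// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.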
13067
13068
13069/*===========================================================================
13070 * FUNCTION : getTemporalDenoiseProcessPlate
13071 *
13072 * DESCRIPTION: query temporal denoise process plate
13073 *
13074 * PARAMETERS : None
13075 *
13076 * RETURN : TNR process plate value
13077 *==========================================================================*/
13078cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13079{
13080 char prop[PROPERTY_VALUE_MAX];
13081 memset(prop, 0, sizeof(prop));
13082 property_get("persist.tnr.process.plates", prop, "0");
13083 int processPlate = atoi(prop);
13084 switch(processPlate) {
13085 case 0:
13086 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13087 case 1:
13088 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13089 case 2:
13090 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13091 case 3:
13092 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13093 default:
13094 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13095 }
13096}
13097
13098
13099/*===========================================================================
13100 * FUNCTION : extractSceneMode
13101 *
13102 * DESCRIPTION: Extract scene mode from frameworks set metadata
13103 *
13104 * PARAMETERS :
13105 * @frame_settings: CameraMetadata reference
13106 * @metaMode: ANDROID_CONTROL_MODE
13107 * @hal_metadata: hal metadata structure
13108 *
13109 * RETURN : NO_ERROR on success, error code otherwise
13110 *==========================================================================*/
13111int32_t QCamera3HardwareInterface::extractSceneMode(
13112 const CameraMetadata &frame_settings, uint8_t metaMode,
13113 metadata_buffer_t *hal_metadata)
13114{
13115 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013116 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13117
13118 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13119 LOGD("Ignoring control mode OFF_KEEP_STATE");
13120 return NO_ERROR;
13121 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013122
13123 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13124 camera_metadata_ro_entry entry =
13125 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13126 if (0 == entry.count)
13127 return rc;
13128
13129 uint8_t fwk_sceneMode = entry.data.u8[0];
13130
13131 int val = lookupHalName(SCENE_MODES_MAP,
13132 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13133 fwk_sceneMode);
13134 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013135 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013136 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013137 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013138 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013139
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013140 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13141 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13142 }
13143
13144 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13145 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013146 cam_hdr_param_t hdr_params;
13147 hdr_params.hdr_enable = 1;
13148 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13149 hdr_params.hdr_need_1x = false;
13150 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13151 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13152 rc = BAD_VALUE;
13153 }
13154 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013155
Thierry Strudel3d639192016-09-09 11:52:26 -070013156 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13157 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13158 rc = BAD_VALUE;
13159 }
13160 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013161
13162 if (mForceHdrSnapshot) {
13163 cam_hdr_param_t hdr_params;
13164 hdr_params.hdr_enable = 1;
13165 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13166 hdr_params.hdr_need_1x = false;
13167 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13168 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13169 rc = BAD_VALUE;
13170 }
13171 }
13172
Thierry Strudel3d639192016-09-09 11:52:26 -070013173 return rc;
13174}
13175
13176/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013177 * FUNCTION : setVideoHdrMode
13178 *
13179 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13180 *
13181 * PARAMETERS :
13182 * @hal_metadata: hal metadata structure
13183 * @vhdr: video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13184 *
13185 * RETURN : NO_ERROR on success, BAD_VALUE on invalid mode
13186 *==========================================================================*/
13187int32_t QCamera3HardwareInterface::setVideoHdrMode(
13188 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13189{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013190 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13191 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13192 }
13193
13194 LOGE("Invalid Video HDR mode %d!", vhdr);
13195 return BAD_VALUE;
13196}
13197
13198/*===========================================================================
13199 * FUNCTION : setSensorHDR
13200 *
13201 * DESCRIPTION: Enable/disable sensor HDR.
13202 *
13203 * PARAMETERS :
13204 * @hal_metadata: hal metadata structure
13205 * @enable: boolean whether to enable/disable sensor HDR
13206 *
13207 * RETURN : NO_ERROR on success, BAD_VALUE on failure
13208 *==========================================================================*/
13209int32_t QCamera3HardwareInterface::setSensorHDR(
13210 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13211{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013212 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013213 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13214
13215 if (enable) {
13216 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13217 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
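        // persist.camera.sensor.hdr selects the cam_sensor_hdr_type_t value directly
        // (0 = CAM_SENSOR_HDR_OFF; 3 = staggered HDR, the default for IoT builds below).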
13218 #ifdef _LE_CAMERA_
13219 //Default to staggered HDR for IOT
13220 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13221 #else
13222 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13223 #endif
13224 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13225 }
13226
13227 bool isSupported = false;
13228 switch (sensor_hdr) {
13229 case CAM_SENSOR_HDR_IN_SENSOR:
13230 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13231 CAM_QCOM_FEATURE_SENSOR_HDR) {
13232 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013233 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013234 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013235 break;
13236 case CAM_SENSOR_HDR_ZIGZAG:
13237 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13238 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13239 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013240 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013241 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013242 break;
13243 case CAM_SENSOR_HDR_STAGGERED:
13244 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13245 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13246 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013247 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013248 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013249 break;
13250 case CAM_SENSOR_HDR_OFF:
13251 isSupported = true;
13252 LOGD("Turning off sensor HDR");
13253 break;
13254 default:
13255 LOGE("HDR mode %d not supported", sensor_hdr);
13256 rc = BAD_VALUE;
13257 break;
13258 }
13259
13260 if(isSupported) {
13261 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13262 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13263 rc = BAD_VALUE;
13264 } else {
13265 if(!isVideoHdrEnable)
13266 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013267 }
13268 }
13269 return rc;
13270}
13271
13272/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013273 * FUNCTION : needRotationReprocess
13274 *
13275 * DESCRIPTION: Check whether rotation needs to be done by reprocess in post-processing (pp)
13276 *
13277 * PARAMETERS : none
13278 *
13279 * RETURN : true: needed
13280 * false: no need
13281 *==========================================================================*/
13282bool QCamera3HardwareInterface::needRotationReprocess()
13283{
13284 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13285        // pp has the capability to process rotation, so rotation reprocess is needed
13286 LOGH("need do reprocess for rotation");
13287 return true;
13288 }
13289
13290 return false;
13291}
13292
13293/*===========================================================================
13294 * FUNCTION : needReprocess
13295 *
13296 * DESCRIPTION: Check whether reprocess is needed
13297 *
13298 * PARAMETERS : none
13299 *
13300 * RETURN : true: needed
13301 * false: no need
13302 *==========================================================================*/
13303bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13304{
13305 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13306 // TODO: add for ZSL HDR later
13307 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13308 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13309 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13310 return true;
13311 } else {
13312 LOGH("already post processed frame");
13313 return false;
13314 }
13315 }
13316 return needRotationReprocess();
13317}
13318
13319/*===========================================================================
13320 * FUNCTION : needJpegExifRotation
13321 *
13322 * DESCRIPTION: Check whether JPEG EXIF rotation is needed
13323 *
13324 * PARAMETERS : none
13325 *
13326 * RETURN : true: needed
13327 * false: no need
13328 *==========================================================================*/
13329bool QCamera3HardwareInterface::needJpegExifRotation()
13330{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013331 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013332 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13333        LOGD("Need to use JPEG EXIF rotation");
13334 return true;
13335 }
13336 return false;
13337}
13338
13339/*===========================================================================
13340 * FUNCTION : addOfflineReprocChannel
13341 *
13342 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13343 * coming from input channel
13344 *
13345 * PARAMETERS :
13346 * @config : reprocess configuration
13347 * @inputChHandle : pointer to the input (source) channel
13348 *
13349 *
13350 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13351 *==========================================================================*/
13352QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13353 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13354{
13355 int32_t rc = NO_ERROR;
13356 QCamera3ReprocessChannel *pChannel = NULL;
13357
13358 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013359 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13360 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013361 if (NULL == pChannel) {
13362 LOGE("no mem for reprocess channel");
13363 return NULL;
13364 }
13365
13366 rc = pChannel->initialize(IS_TYPE_NONE);
13367 if (rc != NO_ERROR) {
13368 LOGE("init reprocess channel failed, ret = %d", rc);
13369 delete pChannel;
13370 return NULL;
13371 }
13372
13373 // pp feature config
13374 cam_pp_feature_config_t pp_config;
13375 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13376
13377 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13378 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13379 & CAM_QCOM_FEATURE_DSDN) {
13380            // Use CPP CDS in case h/w supports it.
13381 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13382 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13383 }
13384 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13385 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13386 }
13387
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013388 if (config.hdr_param.hdr_enable) {
13389 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13390 pp_config.hdr_param = config.hdr_param;
13391 }
13392
13393 if (mForceHdrSnapshot) {
13394 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13395 pp_config.hdr_param.hdr_enable = 1;
13396 pp_config.hdr_param.hdr_need_1x = 0;
13397 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13398 }
13399
Thierry Strudel3d639192016-09-09 11:52:26 -070013400 rc = pChannel->addReprocStreamsFromSource(pp_config,
13401 config,
13402 IS_TYPE_NONE,
13403 mMetadataChannel);
13404
13405 if (rc != NO_ERROR) {
13406 delete pChannel;
13407 return NULL;
13408 }
13409 return pChannel;
13410}
13411
13412/*===========================================================================
13413 * FUNCTION : getMobicatMask
13414 *
13415 * DESCRIPTION: returns mobicat mask
13416 *
13417 * PARAMETERS : none
13418 *
13419 * RETURN : mobicat mask
13420 *
13421 *==========================================================================*/
13422uint8_t QCamera3HardwareInterface::getMobicatMask()
13423{
13424 return m_MobicatMask;
13425}
13426
13427/*===========================================================================
13428 * FUNCTION : setMobicat
13429 *
13430 * DESCRIPTION: set Mobicat on/off.
13431 *
13432 * PARAMETERS :
13433 * @params : none
13434 *
13435 * RETURN : int32_t type of status
13436 * NO_ERROR -- success
13437 *              non-zero failure code
13438 *==========================================================================*/
13439int32_t QCamera3HardwareInterface::setMobicat()
13440{
13441 char value [PROPERTY_VALUE_MAX];
13442 property_get("persist.camera.mobicat", value, "0");
13443 int32_t ret = NO_ERROR;
13444 uint8_t enableMobi = (uint8_t)atoi(value);
13445
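        // Ask both the VFE and the post-processing pipelines to reload chromatix
        // data for all modules.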
13446 if (enableMobi) {
13447 tune_cmd_t tune_cmd;
13448 tune_cmd.type = SET_RELOAD_CHROMATIX;
13449 tune_cmd.module = MODULE_ALL;
13450 tune_cmd.value = TRUE;
13451 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13452 CAM_INTF_PARM_SET_VFE_COMMAND,
13453 tune_cmd);
13454
13455 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13456 CAM_INTF_PARM_SET_PP_COMMAND,
13457 tune_cmd);
13458 }
13459 m_MobicatMask = enableMobi;
13460
13461 return ret;
13462}
13463
13464/*===========================================================================
13465* FUNCTION : getLogLevel
13466*
13467* DESCRIPTION: Reads the log level property into a variable
13468*
13469* PARAMETERS :
13470* None
13471*
13472* RETURN :
13473* None
13474*==========================================================================*/
13475void QCamera3HardwareInterface::getLogLevel()
13476{
13477 char prop[PROPERTY_VALUE_MAX];
13478 uint32_t globalLogLevel = 0;
13479
13480 property_get("persist.camera.hal.debug", prop, "0");
13481 int val = atoi(prop);
13482 if (0 <= val) {
13483 gCamHal3LogLevel = (uint32_t)val;
13484 }
13485
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013486 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013487 gKpiDebugLevel = atoi(prop);
13488
13489 property_get("persist.camera.global.debug", prop, "0");
13490 val = atoi(prop);
13491 if (0 <= val) {
13492 globalLogLevel = (uint32_t)val;
13493 }
13494
13495    /* Highest log level among hal.debug and global.debug is selected */
13496 if (gCamHal3LogLevel < globalLogLevel)
13497 gCamHal3LogLevel = globalLogLevel;
13498
13499 return;
13500}
13501
13502/*===========================================================================
13503 * FUNCTION : validateStreamRotations
13504 *
13505 * DESCRIPTION: Check if the rotations requested are supported
13506 *
13507 * PARAMETERS :
13508 * @stream_list : streams to be configured
13509 *
13510 * RETURN : NO_ERROR on success
13511 * -EINVAL on failure
13512 *
13513 *==========================================================================*/
13514int QCamera3HardwareInterface::validateStreamRotations(
13515 camera3_stream_configuration_t *streamList)
13516{
13517 int rc = NO_ERROR;
13518
13519 /*
13520 * Loop through all streams requested in configuration
13521 * Check if unsupported rotations have been requested on any of them
13522 */
13523 for (size_t j = 0; j < streamList->num_streams; j++){
13524 camera3_stream_t *newStream = streamList->streams[j];
13525
13526 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13527 bool isImplDef = (newStream->format ==
13528 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13529 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13530 isImplDef);
13531
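        // Rotation is only supported on implementation-defined output streams;
        // rotated ZSL (bidirectional) streams are rejected.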
13532 if (isRotated && (!isImplDef || isZsl)) {
13533            LOGE("Error: Unsupported rotation of %d requested for stream "
13534                    "type:%d and stream format:%d",
13535 newStream->rotation, newStream->stream_type,
13536 newStream->format);
13537 rc = -EINVAL;
13538 break;
13539 }
13540 }
13541
13542 return rc;
13543}
13544
13545/*===========================================================================
13546* FUNCTION : getFlashInfo
13547*
13548* DESCRIPTION: Retrieve information about whether the device has a flash.
13549*
13550* PARAMETERS :
13551* @cameraId : Camera id to query
13552* @hasFlash : Boolean indicating whether there is a flash device
13553* associated with given camera
13554* @flashNode : If a flash device exists, this will be its device node.
13555*
13556* RETURN :
13557* None
13558*==========================================================================*/
13559void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
13560 bool& hasFlash,
13561 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
13562{
13563 cam_capability_t* camCapability = gCamCapability[cameraId];
13564 if (NULL == camCapability) {
13565 hasFlash = false;
13566 flashNode[0] = '\0';
13567 } else {
13568 hasFlash = camCapability->flash_available;
13569 strlcpy(flashNode,
13570 (char*)camCapability->flash_dev_name,
13571 QCAMERA_MAX_FILEPATH_LENGTH);
13572 }
13573}
13574
13575/*===========================================================================
13576* FUNCTION : getEepromVersionInfo
13577*
13578* DESCRIPTION: Retrieve version info of the sensor EEPROM data
13579*
13580* PARAMETERS : None
13581*
13582* RETURN : string describing EEPROM version
13583* "\0" if no such info available
13584*==========================================================================*/
13585const char *QCamera3HardwareInterface::getEepromVersionInfo()
13586{
13587 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
13588}
13589
13590/*===========================================================================
13591* FUNCTION : getLdafCalib
13592*
13593* DESCRIPTION: Retrieve Laser AF calibration data
13594*
13595* PARAMETERS : None
13596*
13597 * RETURN     : Pointer to two uint32_t values describing laser AF calibration data
13598* NULL if none is available.
13599*==========================================================================*/
13600const uint32_t *QCamera3HardwareInterface::getLdafCalib()
13601{
13602 if (mLdafCalibExist) {
13603 return &mLdafCalib[0];
13604 } else {
13605 return NULL;
13606 }
13607}
13608
13609/*===========================================================================
13610 * FUNCTION : dynamicUpdateMetaStreamInfo
13611 *
13612 * DESCRIPTION: This function:
13613 * (1) stops all the channels
13614 * (2) returns error on pending requests and buffers
13615 * (3) sends metastream_info in setparams
13616 * (4) starts all channels
13617 * This is useful when sensor has to be restarted to apply any
13618 * settings such as frame rate from a different sensor mode
13619 *
13620 * PARAMETERS : None
13621 *
13622 * RETURN : NO_ERROR on success
13623 * Error codes on failure
13624 *
13625 *==========================================================================*/
13626int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
13627{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013628 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070013629 int rc = NO_ERROR;
13630
13631 LOGD("E");
13632
13633 rc = stopAllChannels();
13634 if (rc < 0) {
13635 LOGE("stopAllChannels failed");
13636 return rc;
13637 }
13638
13639 rc = notifyErrorForPendingRequests();
13640 if (rc < 0) {
13641 LOGE("notifyErrorForPendingRequests failed");
13642 return rc;
13643 }
13644
13645 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
13646        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%llx, "
13647                "Format: %d",
13648 mStreamConfigInfo.type[i],
13649 mStreamConfigInfo.stream_sizes[i].width,
13650 mStreamConfigInfo.stream_sizes[i].height,
13651 mStreamConfigInfo.postprocess_mask[i],
13652 mStreamConfigInfo.format[i]);
13653 }
13654
13655 /* Send meta stream info once again so that ISP can start */
13656 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13657 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
13658 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
13659 mParameters);
13660 if (rc < 0) {
13661 LOGE("set Metastreaminfo failed. Sensor mode does not change");
13662 }
13663
13664 rc = startAllChannels();
13665 if (rc < 0) {
13666 LOGE("startAllChannels failed");
13667 return rc;
13668 }
13669
13670 LOGD("X");
13671 return rc;
13672}
13673
13674/*===========================================================================
13675 * FUNCTION : stopAllChannels
13676 *
13677 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
13678 *
13679 * PARAMETERS : None
13680 *
13681 * RETURN : NO_ERROR on success
13682 * Error codes on failure
13683 *
13684 *==========================================================================*/
13685int32_t QCamera3HardwareInterface::stopAllChannels()
13686{
13687 int32_t rc = NO_ERROR;
13688
13689 LOGD("Stopping all channels");
13690 // Stop the Streams/Channels
13691 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13692 it != mStreamInfo.end(); it++) {
13693 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13694 if (channel) {
13695 channel->stop();
13696 }
13697 (*it)->status = INVALID;
13698 }
13699
13700 if (mSupportChannel) {
13701 mSupportChannel->stop();
13702 }
13703 if (mAnalysisChannel) {
13704 mAnalysisChannel->stop();
13705 }
13706 if (mRawDumpChannel) {
13707 mRawDumpChannel->stop();
13708 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013709 if (mHdrPlusRawSrcChannel) {
13710 mHdrPlusRawSrcChannel->stop();
13711 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013712 if (mMetadataChannel) {
13713        /* If mStreamInfo is not empty, the metadata stream exists */
13714 mMetadataChannel->stop();
13715 }
13716
13717 LOGD("All channels stopped");
13718 return rc;
13719}
13720
13721/*===========================================================================
13722 * FUNCTION : startAllChannels
13723 *
13724 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
13725 *
13726 * PARAMETERS : None
13727 *
13728 * RETURN : NO_ERROR on success
13729 * Error codes on failure
13730 *
13731 *==========================================================================*/
13732int32_t QCamera3HardwareInterface::startAllChannels()
13733{
13734 int32_t rc = NO_ERROR;
13735
13736 LOGD("Start all channels ");
13737 // Start the Streams/Channels
13738 if (mMetadataChannel) {
13739        /* If mStreamInfo is not empty, the metadata stream exists */
13740 rc = mMetadataChannel->start();
13741 if (rc < 0) {
13742 LOGE("META channel start failed");
13743 return rc;
13744 }
13745 }
13746 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13747 it != mStreamInfo.end(); it++) {
13748 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13749 if (channel) {
13750 rc = channel->start();
13751 if (rc < 0) {
13752 LOGE("channel start failed");
13753 return rc;
13754 }
13755 }
13756 }
13757 if (mAnalysisChannel) {
13758 mAnalysisChannel->start();
13759 }
13760 if (mSupportChannel) {
13761 rc = mSupportChannel->start();
13762 if (rc < 0) {
13763 LOGE("Support channel start failed");
13764 return rc;
13765 }
13766 }
13767 if (mRawDumpChannel) {
13768 rc = mRawDumpChannel->start();
13769 if (rc < 0) {
13770 LOGE("RAW dump channel start failed");
13771 return rc;
13772 }
13773 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013774 if (mHdrPlusRawSrcChannel) {
13775 rc = mHdrPlusRawSrcChannel->start();
13776 if (rc < 0) {
13777 LOGE("HDR+ RAW channel start failed");
13778 return rc;
13779 }
13780 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013781
13782 LOGD("All channels started");
13783 return rc;
13784}
13785
13786/*===========================================================================
13787 * FUNCTION : notifyErrorForPendingRequests
13788 *
13789 * DESCRIPTION: This function sends error notifications for all pending requests/buffers
13790 *
13791 * PARAMETERS : None
13792 *
13793 * RETURN : Error codes
13794 * NO_ERROR on success
13795 *
13796 *==========================================================================*/
13797int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
13798{
13799 int32_t rc = NO_ERROR;
13800 unsigned int frameNum = 0;
13801 camera3_capture_result_t result;
13802 camera3_stream_buffer_t *pStream_Buf = NULL;
13803
13804 memset(&result, 0, sizeof(camera3_capture_result_t));
13805
13806 if (mPendingRequestsList.size() > 0) {
13807 pendingRequestIterator i = mPendingRequestsList.begin();
13808 frameNum = i->frame_number;
13809 } else {
13810 /* There might still be pending buffers even though there are
13811 no pending requests. Setting the frameNum to MAX so that
13812 all the buffers with smaller frame numbers are returned */
13813 frameNum = UINT_MAX;
13814 }
13815
13816 LOGH("Oldest frame num on mPendingRequestsList = %u",
13817 frameNum);
13818
Emilian Peev7650c122017-01-19 08:24:33 -080013819 notifyErrorFoPendingDepthData(mDepthChannel);
13820
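    // Requests older than the oldest entry in mPendingRequestsList have already had their
    // result metadata delivered, so only ERROR_BUFFER is reported for their buffers; the
    // remaining requests get an ERROR_REQUEST notification along with errored buffers.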
Thierry Strudel3d639192016-09-09 11:52:26 -070013821 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
13822 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
13823
13824 if (req->frame_number < frameNum) {
13825 // Send Error notify to frameworks for each buffer for which
13826 // metadata buffer is already sent
13827 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
13828 req->frame_number, req->mPendingBufferList.size());
13829
13830 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13831 if (NULL == pStream_Buf) {
13832 LOGE("No memory for pending buffers array");
13833 return NO_MEMORY;
13834 }
13835 memset(pStream_Buf, 0,
13836 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13837 result.result = NULL;
13838 result.frame_number = req->frame_number;
13839 result.num_output_buffers = req->mPendingBufferList.size();
13840 result.output_buffers = pStream_Buf;
13841
13842 size_t index = 0;
13843 for (auto info = req->mPendingBufferList.begin();
13844 info != req->mPendingBufferList.end(); ) {
13845
13846 camera3_notify_msg_t notify_msg;
13847 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13848 notify_msg.type = CAMERA3_MSG_ERROR;
13849 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
13850 notify_msg.message.error.error_stream = info->stream;
13851 notify_msg.message.error.frame_number = req->frame_number;
13852 pStream_Buf[index].acquire_fence = -1;
13853 pStream_Buf[index].release_fence = -1;
13854 pStream_Buf[index].buffer = info->buffer;
13855 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13856 pStream_Buf[index].stream = info->stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013857 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013858 index++;
13859 // Remove buffer from list
13860 info = req->mPendingBufferList.erase(info);
13861 }
13862
13863 // Remove this request from Map
13864 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13865 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13866 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13867
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013868 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013869
13870 delete [] pStream_Buf;
13871 } else {
13872
13873 // Go through the pending requests info and send error request to framework
13874 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
13875
13876 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
13877
13878 // Send error notify to frameworks
13879 camera3_notify_msg_t notify_msg;
13880 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
13881 notify_msg.type = CAMERA3_MSG_ERROR;
13882 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
13883 notify_msg.message.error.error_stream = NULL;
13884 notify_msg.message.error.frame_number = req->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013885 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070013886
13887 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
13888 if (NULL == pStream_Buf) {
13889 LOGE("No memory for pending buffers array");
13890 return NO_MEMORY;
13891 }
13892 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
13893
13894 result.result = NULL;
13895 result.frame_number = req->frame_number;
13896 result.input_buffer = i->input_buffer;
13897 result.num_output_buffers = req->mPendingBufferList.size();
13898 result.output_buffers = pStream_Buf;
13899
13900 size_t index = 0;
13901 for (auto info = req->mPendingBufferList.begin();
13902 info != req->mPendingBufferList.end(); ) {
13903 pStream_Buf[index].acquire_fence = -1;
13904 pStream_Buf[index].release_fence = -1;
13905 pStream_Buf[index].buffer = info->buffer;
13906 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
13907 pStream_Buf[index].stream = info->stream;
13908 index++;
13909 // Remove buffer from list
13910 info = req->mPendingBufferList.erase(info);
13911 }
13912
13913 // Remove this request from Map
13914 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
13915 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
13916 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
13917
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013918 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070013919 delete [] pStream_Buf;
13920 i = erasePendingRequest(i);
13921 }
13922 }
13923
13924 /* Reset pending frame Drop list and requests list */
13925 mPendingFrameDropList.clear();
13926
13927 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
13928 req.mPendingBufferList.clear();
13929 }
13930 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -070013931 LOGH("Cleared all the pending buffers ");
13932
13933 return rc;
13934}
13935
13936bool QCamera3HardwareInterface::isOnEncoder(
13937 const cam_dimension_t max_viewfinder_size,
13938 uint32_t width, uint32_t height)
13939{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013940 return ((width > (uint32_t)max_viewfinder_size.width) ||
13941 (height > (uint32_t)max_viewfinder_size.height) ||
13942 (width > (uint32_t)VIDEO_4K_WIDTH) ||
13943 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070013944}
13945
13946/*===========================================================================
13947 * FUNCTION : setBundleInfo
13948 *
13949 * DESCRIPTION: Set bundle info for all streams that are bundled.
13950 *
13951 * PARAMETERS : None
13952 *
13953 * RETURN : NO_ERROR on success
13954 * Error codes on failure
13955 *==========================================================================*/
13956int32_t QCamera3HardwareInterface::setBundleInfo()
13957{
13958 int32_t rc = NO_ERROR;
13959
13960 if (mChannelHandle) {
13961 cam_bundle_config_t bundleInfo;
13962 memset(&bundleInfo, 0, sizeof(bundleInfo));
13963 rc = mCameraHandle->ops->get_bundle_info(
13964 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
13965 if (rc != NO_ERROR) {
13966 LOGE("get_bundle_info failed");
13967 return rc;
13968 }
13969 if (mAnalysisChannel) {
13970 mAnalysisChannel->setBundleInfo(bundleInfo);
13971 }
13972 if (mSupportChannel) {
13973 mSupportChannel->setBundleInfo(bundleInfo);
13974 }
13975 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
13976 it != mStreamInfo.end(); it++) {
13977 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
13978 channel->setBundleInfo(bundleInfo);
13979 }
13980 if (mRawDumpChannel) {
13981 mRawDumpChannel->setBundleInfo(bundleInfo);
13982 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013983 if (mHdrPlusRawSrcChannel) {
13984 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
13985 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013986 }
13987
13988 return rc;
13989}
13990
13991/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013992 * FUNCTION : setInstantAEC
13993 *
13994 * DESCRIPTION: Set Instant AEC related params.
13995 *
13996 * PARAMETERS :
13997 * @meta: CameraMetadata reference
13998 *
13999 * RETURN : NO_ERROR on success
14000 * Error codes on failure
14001 *==========================================================================*/
14002int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14003{
14004 int32_t rc = NO_ERROR;
14005 uint8_t val = 0;
14006 char prop[PROPERTY_VALUE_MAX];
14007
14008 // First try to configure instant AEC from framework metadata
14009 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14010 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14011 }
14012
14013 // If framework did not set this value, try to read from set prop.
14014 if (val == 0) {
14015 memset(prop, 0, sizeof(prop));
14016 property_get("persist.camera.instant.aec", prop, "0");
14017 val = (uint8_t)atoi(prop);
14018 }
14019
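    // A value of 0 (CAM_AEC_NORMAL_CONVERGENCE) leaves instant AEC disabled; non-zero
    // values select an instant AEC convergence mode, bounded by CAM_AEC_CONVERGENCE_MAX.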
14020 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14021 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14022 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14023 mInstantAEC = val;
14024 mInstantAECSettledFrameNumber = 0;
14025 mInstantAecFrameIdxCount = 0;
14026 LOGH("instantAEC value set %d",val);
14027 if (mInstantAEC) {
14028 memset(prop, 0, sizeof(prop));
14029 property_get("persist.camera.ae.instant.bound", prop, "10");
14030 int32_t aec_frame_skip_cnt = atoi(prop);
14031 if (aec_frame_skip_cnt >= 0) {
14032 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14033 } else {
14034 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14035 rc = BAD_VALUE;
14036 }
14037 }
14038 } else {
14039 LOGE("Bad instant aec value set %d", val);
14040 rc = BAD_VALUE;
14041 }
14042 return rc;
14043}
14044
14045/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014046 * FUNCTION : get_num_overall_buffers
14047 *
14048 * DESCRIPTION: Return the total number of pending buffers across all requests.
14049 *
14050 * PARAMETERS : None
14051 *
14052 * RETURN : Number of overall pending buffers
14053 *
14054 *==========================================================================*/
14055uint32_t PendingBuffersMap::get_num_overall_buffers()
14056{
14057 uint32_t sum_buffers = 0;
14058 for (auto &req : mPendingBuffersInRequest) {
14059 sum_buffers += req.mPendingBufferList.size();
14060 }
14061 return sum_buffers;
14062}
14063
14064/*===========================================================================
14065 * FUNCTION : removeBuf
14066 *
14067 * DESCRIPTION: Remove a matching buffer from tracker.
14068 *
14069 * PARAMETERS : @buffer: image buffer for the callback
14070 *
14071 * RETURN : None
14072 *
14073 *==========================================================================*/
14074void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14075{
14076 bool buffer_found = false;
14077 for (auto req = mPendingBuffersInRequest.begin();
14078 req != mPendingBuffersInRequest.end(); req++) {
14079 for (auto k = req->mPendingBufferList.begin();
14080 k != req->mPendingBufferList.end(); k++ ) {
14081 if (k->buffer == buffer) {
14082 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14083 req->frame_number, buffer);
14084 k = req->mPendingBufferList.erase(k);
14085 if (req->mPendingBufferList.empty()) {
14086 // Remove this request from Map
14087 req = mPendingBuffersInRequest.erase(req);
14088 }
14089 buffer_found = true;
14090 break;
14091 }
14092 }
14093 if (buffer_found) {
14094 break;
14095 }
14096 }
14097 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14098 get_num_overall_buffers());
14099}
14100
14101/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014102 * FUNCTION : getBufErrStatus
14103 *
14104 * DESCRIPTION: get buffer error status
14105 *
14106 * PARAMETERS : @buffer: buffer handle
14107 *
14108 * RETURN : Error status
14109 *
14110 *==========================================================================*/
14111int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14112{
14113 for (auto& req : mPendingBuffersInRequest) {
14114 for (auto& k : req.mPendingBufferList) {
14115 if (k.buffer == buffer)
14116 return k.bufStatus;
14117 }
14118 }
14119 return CAMERA3_BUFFER_STATUS_OK;
14120}
14121
14122/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014123 * FUNCTION : setPAAFSupport
14124 *
14125 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14126 * feature mask according to stream type and filter
14127 * arrangement
14128 *
14129 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14130 * @stream_type: stream type
14131 * @filter_arrangement: filter arrangement
14132 *
14133 * RETURN : None
14134 *==========================================================================*/
14135void QCamera3HardwareInterface::setPAAFSupport(
14136 cam_feature_mask_t& feature_mask,
14137 cam_stream_type_t stream_type,
14138 cam_color_filter_arrangement_t filter_arrangement)
14139{
Thierry Strudel3d639192016-09-09 11:52:26 -070014140 switch (filter_arrangement) {
14141 case CAM_FILTER_ARRANGEMENT_RGGB:
14142 case CAM_FILTER_ARRANGEMENT_GRBG:
14143 case CAM_FILTER_ARRANGEMENT_GBRG:
14144 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014145 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14146 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014147 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014148 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14149 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014150 }
14151 break;
14152 case CAM_FILTER_ARRANGEMENT_Y:
14153 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14154 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14155 }
14156 break;
14157 default:
14158 break;
14159 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014160 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14161 feature_mask, stream_type, filter_arrangement);
14162
14163
Thierry Strudel3d639192016-09-09 11:52:26 -070014164}
14165
14166/*===========================================================================
14167* FUNCTION : getSensorMountAngle
14168*
14169* DESCRIPTION: Retrieve sensor mount angle
14170*
14171* PARAMETERS : None
14172*
14173* RETURN : sensor mount angle in uint32_t
14174*==========================================================================*/
14175uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14176{
14177 return gCamCapability[mCameraId]->sensor_mount_angle;
14178}
14179
14180/*===========================================================================
14181* FUNCTION : getRelatedCalibrationData
14182*
14183* DESCRIPTION: Retrieve related system calibration data
14184*
14185* PARAMETERS : None
14186*
14187* RETURN : Pointer of related system calibration data
14188*==========================================================================*/
14189const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14190{
14191 return (const cam_related_system_calibration_data_t *)
14192 &(gCamCapability[mCameraId]->related_cam_calibration);
14193}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014194
14195/*===========================================================================
14196 * FUNCTION : is60HzZone
14197 *
14198 * DESCRIPTION: Whether the device is in a region with 60 Hz mains electricity
14199 *
14200 * PARAMETERS : None
14201 *
14202 * RETURN : True if in 60Hz zone, False otherwise
14203 *==========================================================================*/
14204bool QCamera3HardwareInterface::is60HzZone()
14205{
14206 time_t t = time(NULL);
14207 struct tm lt;
14208
14209 struct tm* r = localtime_r(&t, &lt);
14210
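    // Heuristic based on the UTC offset of the local time zone: offsets at or below
    // UTC-2 or at or above UTC+8 are treated as 60 Hz regions; if local time cannot
    // be obtained, default to 60 Hz.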
14211 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14212 return true;
14213 else
14214 return false;
14215}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014216
14217/*===========================================================================
14218 * FUNCTION : adjustBlackLevelForCFA
14219 *
14220 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
14221 * of bayer CFA (Color Filter Array).
14222 *
14223 * PARAMETERS : @input: black level pattern in the order of RGGB
14224 * @output: black level pattern in the order of CFA
14225 * @color_arrangement: CFA color arrangement
14226 *
14227 * RETURN : None
14228 *==========================================================================*/
14229template<typename T>
14230void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14231 T input[BLACK_LEVEL_PATTERN_CNT],
14232 T output[BLACK_LEVEL_PATTERN_CNT],
14233 cam_color_filter_arrangement_t color_arrangement)
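    // input[] is ordered {R, Gr, Gb, B}; output[] is reordered to follow the sensor's
    // CFA readout order (e.g. GRBG -> {Gr, R, B, Gb}).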
14234{
14235 switch (color_arrangement) {
14236 case CAM_FILTER_ARRANGEMENT_GRBG:
14237 output[0] = input[1];
14238 output[1] = input[0];
14239 output[2] = input[3];
14240 output[3] = input[2];
14241 break;
14242 case CAM_FILTER_ARRANGEMENT_GBRG:
14243 output[0] = input[2];
14244 output[1] = input[3];
14245 output[2] = input[0];
14246 output[3] = input[1];
14247 break;
14248 case CAM_FILTER_ARRANGEMENT_BGGR:
14249 output[0] = input[3];
14250 output[1] = input[2];
14251 output[2] = input[1];
14252 output[3] = input[0];
14253 break;
14254 case CAM_FILTER_ARRANGEMENT_RGGB:
14255 output[0] = input[0];
14256 output[1] = input[1];
14257 output[2] = input[2];
14258 output[3] = input[3];
14259 break;
14260 default:
14261 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14262 break;
14263 }
14264}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014265
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014266void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14267 CameraMetadata &resultMetadata,
14268 std::shared_ptr<metadata_buffer_t> settings)
14269{
14270 if (settings == nullptr) {
14271 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14272 return;
14273 }
14274
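    // Carry the JPEG-related settings (GPS, orientation, quality, thumbnail) and the
    // capture intent from the original HDR+ request over into the result metadata.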
14275 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14276 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14277 }
14278
14279 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14280 String8 str((const char *)gps_methods);
14281 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14282 }
14283
14284 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14285 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14286 }
14287
14288 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14289 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14290 }
14291
14292 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14293 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14294 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14295 }
14296
14297 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14298 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14299 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14300 }
14301
14302 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14303 int32_t fwk_thumb_size[2];
14304 fwk_thumb_size[0] = thumb_size->width;
14305 fwk_thumb_size[1] = thumb_size->height;
14306 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14307 }
14308
14309 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14310 uint8_t fwk_intent = intent[0];
14311 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14312 }
14313}
14314
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014315bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14316 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14317 const CameraMetadata &metadata)
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014318{
14319 if (hdrPlusRequest == nullptr) return false;
14320
14321 // Check noise reduction mode is high quality.
14322 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14323 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14324 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenee335912017-02-09 17:53:20 -080014325        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is absent or not HQ.",
14326                __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014327 return false;
14328 }
14329
14330 // Check edge mode is high quality.
14331 if (!metadata.exists(ANDROID_EDGE_MODE) ||
14332 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14333 ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
14334 return false;
14335 }
14336
14337 if (request.num_output_buffers != 1 ||
14338 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
14339 ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014340 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14341 ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14342                    request.output_buffers[i].stream->width,
14343                    request.output_buffers[i].stream->height,
14344                    request.output_buffers[i].stream->format);
14345 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014346 return false;
14347 }
14348
14349 // Get a YUV buffer from pic channel.
14350 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14351 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14352 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14353 if (res != OK) {
14354 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14355 __FUNCTION__, strerror(-res), res);
14356 return false;
14357 }
14358
14359 pbcamera::StreamBuffer buffer;
14360 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014361 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014362 buffer.data = yuvBuffer->buffer;
14363 buffer.dataSize = yuvBuffer->frame_len;
14364
14365 pbcamera::CaptureRequest pbRequest;
14366 pbRequest.id = request.frame_number;
14367 pbRequest.outputBuffers.push_back(buffer);
14368
14369 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014370 res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014371 if (res != OK) {
14372 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14373 strerror(-res), res);
14374 return false;
14375 }
14376
14377 hdrPlusRequest->yuvBuffer = yuvBuffer;
14378 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14379
14380 return true;
14381}
14382
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014383status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked() {
14384 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14385 return OK;
14386 }
14387
14388 status_t res = gEaselManagerClient.openHdrPlusClientAsync(this);
14389 if (res != OK) {
14390 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14391 strerror(-res), res);
14392 return res;
14393 }
14394 gHdrPlusClientOpening = true;
14395
14396 return OK;
14397}
14398
Chien-Yu Chenee335912017-02-09 17:53:20 -080014399status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14400{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014401 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014402
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014403 // Check if gHdrPlusClient is opened or being opened.
14404 if (gHdrPlusClient == nullptr) {
14405 if (gHdrPlusClientOpening) {
14406 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14407 return OK;
14408 }
14409
14410 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014411 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014412 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14413 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014414 return res;
14415 }
14416
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014417 // When opening HDR+ client completes, HDR+ mode will be enabled.
14418 return OK;
14419
Chien-Yu Chenee335912017-02-09 17:53:20 -080014420 }
14421
14422 // Configure stream for HDR+.
14423 res = configureHdrPlusStreamsLocked();
14424 if (res != OK) {
14425 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014426 return res;
14427 }
14428
14429 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14430 res = gHdrPlusClient->setZslHdrPlusMode(true);
14431 if (res != OK) {
14432 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014433 return res;
14434 }
14435
14436 mHdrPlusModeEnabled = true;
14437 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14438
14439 return OK;
14440}
14441
14442void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14443{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014444 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014445 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014446 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14447 if (res != OK) {
14448 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14449 }
Chien-Yu Chenee335912017-02-09 17:53:20 -080014450 }
14451
14452 mHdrPlusModeEnabled = false;
14453 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14454}
14455
14456status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014457{
14458 pbcamera::InputConfiguration inputConfig;
14459 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
14460 status_t res = OK;
14461
14462 // Configure HDR+ client streams.
14463 // Get input config.
14464 if (mHdrPlusRawSrcChannel) {
14465 // HDR+ input buffers will be provided by HAL.
14466 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
14467 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
14468 if (res != OK) {
14469            LOGE("%s: Failed to fill stream config for HDR+ raw src stream: %s (%d)",
14470 __FUNCTION__, strerror(-res), res);
14471 return res;
14472 }
14473
14474 inputConfig.isSensorInput = false;
14475 } else {
14476 // Sensor MIPI will send data to Easel.
14477 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080014478 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014479 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
14480 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
14481 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
14482 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
14483 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
14484 if (mSensorModeInfo.num_raw_bits != 10) {
14485 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
14486 mSensorModeInfo.num_raw_bits);
14487 return BAD_VALUE;
14488 }
14489
14490 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014491 }
14492
14493 // Get output configurations.
14494 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080014495 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014496
14497 // Easel may need to output YUV output buffers if mPictureChannel was created.
14498 pbcamera::StreamConfiguration yuvOutputConfig;
14499 if (mPictureChannel != nullptr) {
14500 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
14501 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
14502 if (res != OK) {
14503            LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
14504 __FUNCTION__, strerror(-res), res);
14505
14506 return res;
14507 }
14508
14509 outputStreamConfigs.push_back(yuvOutputConfig);
14510 }
14511
14512 // TODO: consider other channels for YUV output buffers.
14513
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014514 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014515 if (res != OK) {
14516        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
14517 strerror(-res), res);
14518 return res;
14519 }
14520
14521 return OK;
14522}
14523
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014524void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client) {
14525 if (client == nullptr) {
14526 ALOGE("%s: Opened client is null.", __FUNCTION__);
14527 return;
14528 }
14529
14530 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
14531
14532 Mutex::Autolock l(gHdrPlusClientLock);
14533 gHdrPlusClient = std::move(client);
14534 gHdrPlusClientOpening = false;
14535
14536 // Set static metadata.
14537 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
14538 if (res != OK) {
14539 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
14540 __FUNCTION__, strerror(-res), res);
14541 gEaselManagerClient.closeHdrPlusClient(std::move(gHdrPlusClient));
14542 gHdrPlusClient = nullptr;
14543 return;
14544 }
14545
14546 // Enable HDR+ mode.
14547 res = enableHdrPlusModeLocked();
14548 if (res != OK) {
14549        LOGE("%s: Failed to enable HDR+ mode.", __FUNCTION__);
14550 }
14551}
14552
14553void QCamera3HardwareInterface::onOpenFailed(status_t err) {
14554 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
14555 Mutex::Autolock l(gHdrPlusClientLock);
14556 gHdrPlusClientOpening = false;
14557}
14558
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014559void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
14560 const camera_metadata_t &resultMetadata) {
14561 if (result != nullptr) {
14562 if (result->outputBuffers.size() != 1) {
14563 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
14564 result->outputBuffers.size());
14565 return;
14566 }
14567
14568 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
14569 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
14570 result->outputBuffers[0].streamId);
14571 return;
14572 }
14573
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014574 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014575 HdrPlusPendingRequest pendingRequest;
14576 {
14577 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14578            auto req = mHdrPlusPendingRequests.find(result->requestId);
            if (req == mHdrPlusPendingRequests.end()) {
                ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.",
                        __FUNCTION__, result->requestId);
                return;
            }
14579            pendingRequest = req->second;
14580 }
14581
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014582 // Update the result metadata with the settings of the HDR+ still capture request because
14583 // the result metadata belongs to a ZSL buffer.
14584 CameraMetadata metadata;
14585 metadata = &resultMetadata;
14586 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
14587 camera_metadata_t* updatedResultMetadata = metadata.release();
14588
14589 QCamera3PicChannel *picChannel =
14590 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
14591
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014592 // Check if dumping HDR+ YUV output is enabled.
14593 char prop[PROPERTY_VALUE_MAX];
14594 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
14595 bool dumpYuvOutput = atoi(prop);
14596
14597 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014598 // Dump yuv buffer to a ppm file.
14599 pbcamera::StreamConfiguration outputConfig;
14600 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
14601 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
14602 if (rc == OK) {
14603 char buf[FILENAME_MAX] = {};
14604 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
14605 result->requestId, result->outputBuffers[0].streamId,
14606 outputConfig.image.width, outputConfig.image.height);
14607
14608 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
14609 } else {
14610 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
14611 __FUNCTION__, strerror(-rc), rc);
14612 }
14613 }
14614
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014615 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
14616 auto halMetadata = std::make_shared<metadata_buffer_t>();
14617 clear_metadata_buffer(halMetadata.get());
14618
14619 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
14620 // encoding.
14621 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
14622 halStreamId, /*minFrameDuration*/0);
14623 if (res == OK) {
14624 // Return the buffer to pic channel for encoding.
14625 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
14626 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
14627 halMetadata);
14628 } else {
14629 // Return the buffer without encoding.
14630 // TODO: This should not happen but we may want to report an error buffer to camera
14631 // service.
14632 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
14633 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
14634 strerror(-res), res);
14635 }
14636
14637 // Send HDR+ metadata to framework.
14638 {
14639 pthread_mutex_lock(&mMutex);
14640
14641 // updatedResultMetadata will be freed in handlePendingResultsWithLock.
14642 handlePendingResultsWithLock(result->requestId, updatedResultMetadata);
14643 pthread_mutex_unlock(&mMutex);
14644 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014645
14646 // Remove the HDR+ pending request.
14647 {
14648 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14649 auto req = mHdrPlusPendingRequests.find(result->requestId);
14650 mHdrPlusPendingRequests.erase(req);
14651 }
14652 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014653}
14654
Chien-Yu Chene687bd02016-12-07 18:30:26 -080014655void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
14656 // TODO: Handle HDR+ capture failures and send the failure to framework.
14657 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
14658    auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
    if (pendingRequest == mHdrPlusPendingRequests.end()) {
        ALOGE("%s: Couldn't find a pending HDR+ request for request id %d.", __FUNCTION__,
                failedResult->requestId);
        return;
    }
14659
14660 // Return the buffer to pic channel.
14661 QCamera3PicChannel *picChannel =
14662 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
14663 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
14664
14665 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014666}
14667
Thierry Strudel3d639192016-09-09 11:52:26 -070014668}; //end namespace qcamera